1 // Copyright 2015-2024 The Khronos Group Inc. 2 // 3 // SPDX-License-Identifier: Apache-2.0 OR MIT 4 // 5 6 // This header is generated from the Khronos Vulkan XML API Registry. 7 8 #ifndef VULKAN_FUNCS_HPP 9 #define VULKAN_FUNCS_HPP 10 11 // include-what-you-use: make sure, vulkan.hpp is used by code-completers 12 // IWYU pragma: private; include "vulkan.hpp" 13 14 namespace VULKAN_HPP_NAMESPACE 15 { 16 17 //=========================== 18 //=== COMMAND Definitions === 19 //=========================== 20 21 //=== VK_VERSION_1_0 === 22 23 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Instance * pInstance,Dispatch const & d)24 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, 25 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26 VULKAN_HPP_NAMESPACE::Instance * pInstance, 27 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 28 { 29 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 30 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), 31 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 32 reinterpret_cast<VkInstance *>( pInstance ) ) ); 33 } 34 35 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 36 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)37 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( 38 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 39 { 40 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 41 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 42 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 43 # endif 44 45 VULKAN_HPP_NAMESPACE::Instance instance; 46 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 47 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 48 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 49 reinterpret_cast<VkInstance *>( &instance ) ) ); 50 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); 51 52 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); 53 } 54 55 # ifndef VULKAN_HPP_NO_SMART_HANDLE 56 template <typename Dispatch> createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)57 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( 58 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 59 { 60 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 61 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 62 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 63 # endif 64 65 VULKAN_HPP_NAMESPACE::Instance instance; 66 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 67 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 68 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 69 reinterpret_cast<VkInstance *>( &instance ) ) ); 70 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); 71 72 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 73 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 74 } 75 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 76 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 77 78 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const79 VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 80 { 81 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 82 d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 83 } 84 85 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 86 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const87 VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 88 { 89 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 90 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 91 VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function <vkDestroyInstance> requires <VK_VERSION_1_0>" ); 92 # endif 93 94 d.vkDestroyInstance( m_instance, 95 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 96 } 97 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 98 99 template <typename Dispatch> enumeratePhysicalDevices(uint32_t * pPhysicalDeviceCount,VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,Dispatch const & d) const100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, 101 VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, 102 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 103 { 104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 105 return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); 106 } 107 108 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 109 template <typename PhysicalDeviceAllocator, typename Dispatch> 110 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(Dispatch const & d) const111 Instance::enumeratePhysicalDevices( Dispatch const & d ) const 112 { 113 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 114 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 115 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 116 # endif 117 118 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; 119 uint32_t physicalDeviceCount; 120 VULKAN_HPP_NAMESPACE::Result result; 121 do 122 { 123 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 124 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 125 { 126 physicalDevices.resize( physicalDeviceCount ); 127 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 128 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 129 } 130 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 131 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 132 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 133 if ( physicalDeviceCount < physicalDevices.size() ) 134 { 135 physicalDevices.resize( physicalDeviceCount ); 136 } 137 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 138 } 139 140 template <typename PhysicalDeviceAllocator, 141 typename Dispatch, 142 typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type> 143 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(PhysicalDeviceAllocator & physicalDeviceAllocator,Dispatch const & d) const144 Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const 145 { 146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 147 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 148 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 149 # endif 150 151 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); 152 uint32_t physicalDeviceCount; 153 VULKAN_HPP_NAMESPACE::Result result; 154 do 155 { 156 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 157 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 158 { 159 physicalDevices.resize( physicalDeviceCount ); 160 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 161 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 162 } 163 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 164 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 165 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 166 if ( physicalDeviceCount < physicalDevices.size() ) 167 { 168 physicalDevices.resize( 
physicalDeviceCount ); 169 } 170 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 171 } 172 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 173 174 template <typename Dispatch> getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,Dispatch const & d) const175 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 176 { 177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 178 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); 179 } 180 181 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 182 template <typename Dispatch> 183 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const & d) const184 PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 185 { 186 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 187 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 188 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" ); 189 # endif 190 191 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; 192 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); 193 194 return features; 195 } 196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 197 198 template <typename Dispatch> getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,Dispatch const & d) const199 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 200 VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, 201 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 202 { 203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 204 
d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); 205 } 206 207 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 208 template <typename Dispatch> 209 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const210 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 211 { 212 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 213 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 214 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" ); 215 # endif 216 217 VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; 218 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); 219 220 return formatProperties; 221 } 222 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 223 224 template <typename Dispatch> getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,Dispatch const & d) const225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 226 VULKAN_HPP_NAMESPACE::ImageType type, 227 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 228 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 229 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 230 VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, 231 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 232 { 233 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
234 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 235 static_cast<VkFormat>( format ), 236 static_cast<VkImageType>( type ), 237 static_cast<VkImageTiling>( tiling ), 238 static_cast<VkImageUsageFlags>( usage ), 239 static_cast<VkImageCreateFlags>( flags ), 240 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); 241 } 242 243 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 244 template <typename Dispatch> 245 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,Dispatch const & d) const246 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 247 VULKAN_HPP_NAMESPACE::ImageType type, 248 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 249 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 250 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 251 Dispatch const & d ) const 252 { 253 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 254 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 255 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" ); 256 # endif 257 258 VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; 259 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 260 d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 261 static_cast<VkFormat>( format ), 262 static_cast<VkImageType>( type ), 263 static_cast<VkImageTiling>( tiling ), 264 static_cast<VkImageUsageFlags>( usage ), 265 static_cast<VkImageCreateFlags>( flags ), 266 reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); 267 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); 268 269 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 270 } 271 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 272 273 template <typename Dispatch> getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,Dispatch const & d) const274 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, 275 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 276 { 277 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 278 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); 279 } 280 281 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 282 template <typename Dispatch> 283 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const & d) const284 PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 285 { 286 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 287 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 288 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" ); 289 # endif 290 291 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; 292 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); 293 294 return properties; 295 } 296 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 297 298 template <typename Dispatch> getQueueFamilyProperties(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,Dispatch const & d) const299 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, 300 VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, 301 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 302 { 303 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 304 d.vkGetPhysicalDeviceQueueFamilyProperties( 305 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); 306 } 307 308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 309 template <typename QueueFamilyPropertiesAllocator, typename Dispatch> 310 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(Dispatch const & d) const311 PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const 312 { 313 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 314 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 315 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 316 # endif 317 318 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; 319 uint32_t queueFamilyPropertyCount; 320 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 321 queueFamilyProperties.resize( queueFamilyPropertyCount ); 322 d.vkGetPhysicalDeviceQueueFamilyProperties( 323 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 324 325 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 326 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 327 { 328 queueFamilyProperties.resize( queueFamilyPropertyCount ); 329 } 330 return queueFamilyProperties; 331 } 332 333 template < 334 typename QueueFamilyPropertiesAllocator, 335 typename Dispatch, 336 typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type> 337 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,Dispatch const & d) const338 PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const 339 { 340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 341 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 342 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 343 # endif 344 345 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); 346 uint32_t queueFamilyPropertyCount; 347 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 348 queueFamilyProperties.resize( queueFamilyPropertyCount ); 349 d.vkGetPhysicalDeviceQueueFamilyProperties( 350 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 351 352 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 353 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 354 { 355 queueFamilyProperties.resize( queueFamilyPropertyCount ); 356 } 357 return queueFamilyProperties; 358 } 359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 360 361 template <typename Dispatch> getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,Dispatch const & d) const362 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, 363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 364 { 365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 366 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, 
reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); 367 } 368 369 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 370 template <typename Dispatch> 371 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const & d) const372 PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 373 { 374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 375 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 376 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" ); 377 # endif 378 379 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; 380 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); 381 382 return memoryProperties; 383 } 384 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 385 386 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const387 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 388 { 389 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 390 return d.vkGetInstanceProcAddr( m_instance, pName ); 391 } 392 393 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 394 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const395 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 396 { 397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 398 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 399 VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" ); 400 # endif 401 402 PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); 403 404 return result; 405 } 
406 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 407 408 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const409 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 410 { 411 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 412 return d.vkGetDeviceProcAddr( m_device, pName ); 413 } 414 415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 416 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const417 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 418 { 419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 420 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 421 VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" ); 422 # endif 423 424 PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); 425 426 return result; 427 } 428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 429 430 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Device * pDevice,Dispatch const & d) const431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, 432 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 433 VULKAN_HPP_NAMESPACE::Device * pDevice, 434 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 435 { 436 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 437 return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, 438 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), 439 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 440 reinterpret_cast<VkDevice *>( pDevice ) ) ); 441 } 442 443 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 444 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const445 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( 446 const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 447 { 448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 449 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 450 VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 451 # endif 452 453 VULKAN_HPP_NAMESPACE::Device device; 454 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 455 d.vkCreateDevice( m_physicalDevice, 456 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 457 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 458 reinterpret_cast<VkDevice *>( &device ) ) ); 459 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); 460 461 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); 462 } 463 464 # ifndef VULKAN_HPP_NO_SMART_HANDLE 465 template <typename Dispatch> 466 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const467 PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, 468 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 469 Dispatch const & d ) const 470 { 471 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 472 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 473 VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 474 # endif 475 476 VULKAN_HPP_NAMESPACE::Device device; 477 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 478 d.vkCreateDevice( m_physicalDevice, 479 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 480 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 481 reinterpret_cast<VkDevice *>( &device ) ) ); 482 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); 483 484 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 485 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 486 } 487 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 488 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 489 490 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const491 VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 492 { 493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 494 d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 495 } 496 497 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 498 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const499 VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 500 { 501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 502 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) 503 VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function <vkDestroyDevice> requires <VK_VERSION_1_0>" ); 504 # endif 505 506 d.vkDestroyDevice( m_device, 507 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 508 } 509 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 510 511 template <typename Dispatch> enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d)512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, 513 uint32_t * pPropertyCount, 514 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 515 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 516 { 517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 518 return static_cast<Result>( 519 d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 520 } 521 522 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 523 template <typename ExtensionPropertiesAllocator, typename Dispatch> 524 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d)525 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) 526 { 527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 528 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 529 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); 530 # endif 531 532 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 533 uint32_t propertyCount; 534 VULKAN_HPP_NAMESPACE::Result result; 535 do 536 { 
      // (continuation) Closing portion of the preceding enumerateInstanceExtensionProperties
      // overload, whose opening lines precede this chunk: the standard Vulkan
      // count-then-fill enumeration loop.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties(
          layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );  // extension set may change between the two calls
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if the fill call reported fewer elements than the count call did.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Enumerates instance extension properties (optionally restricted to the given layer)
  // into a std::vector constructed from the caller-supplied allocator. The enable_if
  // template parameter restricts this overload to allocators of ExtensionProperties.
  template <
    typename ExtensionPropertiesAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
    enumerateInstanceExtensionProperties( Optional<const std::string> layerName,
                                          ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                          Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // First call: query the count only (nullptr data). Second call: fill the resized buffer.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties(
          layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );  // retry if the set changed between calls
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw (pointer-based) wrapper around vkEnumerateDeviceExtensionProperties; mirrors the
  // C API exactly and returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
                                                                                                    uint32_t * pPropertyCount,
                                                                                                    VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
                                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: enumerates this physical device's extension properties (optionally
  // restricted to the given layer) into a default-allocated std::vector; throws / returns
  // per resultCheck on failure.
  template <typename ExtensionPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties(
          m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if fewer elements were written than initially counted.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
  // Allocator-aware overload of PhysicalDevice::enumerateDeviceExtensionProperties; the
  // enable_if parameter restricts it to allocators of ExtensionProperties.
  template <
    typename ExtensionPropertiesAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
                                                        ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties(
          m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkEnumerateInstanceLayerProperties.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
                                                                                  VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                                                  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: enumerates all instance layer properties into a default-allocated
  // std::vector.
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
  // Allocator-aware overload of enumerateInstanceLayerProperties; restricted via
  // enable_if to allocators of LayerProperties.
  template <typename LayerPropertiesAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkEnumerateDeviceLayerProperties.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
                                                                                                VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: enumerates this physical device's layer properties into a
  // default-allocated std::vector.
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
  // Allocator-aware overload of PhysicalDevice::enumerateDeviceLayerProperties;
  // restricted via enable_if to allocators of LayerProperties.
  template <typename LayerPropertiesAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t propertyCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Count-then-fill enumeration loop; retried while VK_INCOMPLETE.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkGetDeviceQueue: retrieves a queue handle by
  // family and index into *pQueue.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the queue handle by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function <vkGetDeviceQueue> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );

    return queue;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkQueueSubmit; returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
                                                               const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
                                                               VULKAN_HPP_NAMESPACE::Fence fence,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: submits an ArrayProxy of SubmitInfo structures and checks the
  // result (throws in exception builds).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw form: waits until the queue is idle, returning the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  }
#else
  // Enhanced form: waits until the queue is idle and checks the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueueWaitIdle( m_queue ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw form: waits until the whole device is idle, returning the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  }
#else
  // Enhanced form: waits until the whole device is idle and checks the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeviceWaitIdle( m_device ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw (pointer-based) wrapper around vkAllocateMemory; returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
                                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                        VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAllocateMemory( m_device,
                                                    reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: allocates device memory and returns the handle by value after
  // result checking. The optional allocator is converted to VkAllocationCallbacks
  // (nullptr when not supplied).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
    Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkAllocateMemory( m_device,
                          reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle overload: as allocateMemory, but wraps the handle in a UniqueHandle
  // whose deleter (ObjectFree) frees the memory with the same allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
    Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkAllocateMemory( m_device,
                          reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw (pointer-based) wrapper around vkFreeMemory.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: frees device memory, converting the optional allocator to
  // VkAllocationCallbacks (nullptr when not supplied).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeMemory( m_device,
                    static_cast<VkDeviceMemory>( memory ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Alias of freeMemory named `free`. The parentheses around ( Device::free ) prevent a
  // possible macro named `free` (e.g. from <cstdlib> wrappers) from expanding here.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload of the `free` alias.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeMemory( m_device,
                    static_cast<VkDeviceMemory>( memory ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkMapMemory; writes the mapped host pointer to
  // *ppData and returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                                   VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
                                                                   void ** ppData,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkMapMemory( m_device,
                                               static_cast<VkDeviceMemory>( memory ),
                                               static_cast<VkDeviceSize>( offset ),
                                               static_cast<VkDeviceSize>( size ),
                                               static_cast<VkMemoryMapFlags>( flags ),
                                               ppData ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: maps the memory range and returns the host pointer by value after
  // result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                                                   VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                                                                   VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
                                                                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" );
# endif

    void * pData;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory( m_device,
                                                                                                    static_cast<VkDeviceMemory>( memory ),
                                                                                                    static_cast<VkDeviceSize>( offset ),
                                                                                                    static_cast<VkDeviceSize>( size ),
                                                                                                    static_cast<VkMemoryMapFlags>( flags ),
                                                                                                    &pData ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wrapper around vkUnmapMemory.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  }

  // Raw (pointer-based) wrapper around vkFlushMappedMemoryRanges; returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
                                                                                 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: flushes an ArrayProxy of mapped memory ranges with result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-based) wrapper around vkInvalidateMappedMemoryRanges; returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
                                                                                      const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }
invalidateMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1129 Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1130 Dispatch const & d ) const 1131 { 1132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1133 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1134 VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" ); 1135 # endif 1136 1137 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1138 d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1139 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); 1140 1141 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1142 } 1143 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1144 1145 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,Dispatch const & d) const1146 VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1147 VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, 1148 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1149 { 1150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1151 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); 1152 } 1153 1154 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1155 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1156 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize 
Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1157 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1158 { 1159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1161 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" ); 1162 # endif 1163 1164 VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; 1165 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); 1166 1167 return committedMemoryInBytes; 1168 } 1169 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1170 1171 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1172 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1173 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1174 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1175 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1176 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1177 { 1178 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1179 return static_cast<Result>( 1180 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1181 } 1182 #else 1183 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1184 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( 1185 VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 
1186 { 1187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1189 VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" ); 1190 # endif 1191 1192 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1193 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1194 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); 1195 1196 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1197 } 1198 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1199 1200 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1201 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1202 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, 1203 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1204 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1205 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1206 { 1207 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1208 return static_cast<Result>( 1209 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1210 } 1211 #else 1212 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1213 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( 1214 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 
Dispatch const & d ) const 1215 { 1216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1218 VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" ); 1219 # endif 1220 1221 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1222 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1223 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); 1224 1225 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1226 } 1227 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1228 1229 template <typename Dispatch> getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1230 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1231 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1232 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1233 { 1234 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1235 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1236 } 1237 1238 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1239 template <typename Dispatch> 1240 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,Dispatch const & d) const1241 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1242 { 1243 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1244 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1245 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && 
"Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" ); 1246 # endif 1247 1248 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1249 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1250 1251 return memoryRequirements; 1252 } 1253 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1254 1255 template <typename Dispatch> getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1256 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1257 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1258 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1259 { 1260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1261 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1262 } 1263 1264 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1265 template <typename Dispatch> 1266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1267 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1268 { 1269 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1270 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1271 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" ); 1272 # endif 1273 1274 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1275 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1276 1277 return memoryRequirements; 1278 } 1279 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ 1280 1281 template <typename Dispatch> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,Dispatch const & d) const1282 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1283 uint32_t * pSparseMemoryRequirementCount, 1284 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, 1285 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1286 { 1287 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1288 d.vkGetImageSparseMemoryRequirements( m_device, 1289 static_cast<VkImage>( image ), 1290 pSparseMemoryRequirementCount, 1291 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); 1292 } 1293 1294 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1295 template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch> 1296 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1297 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 1298 { 1299 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1300 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1301 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1302 # endif 1303 1304 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; 1305 uint32_t sparseMemoryRequirementCount; 1306 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1307 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1308 
d.vkGetImageSparseMemoryRequirements( m_device, 1309 static_cast<VkImage>( image ), 1310 &sparseMemoryRequirementCount, 1311 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1312 1313 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1314 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1315 { 1316 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1317 } 1318 return sparseMemoryRequirements; 1319 } 1320 1321 template <typename SparseImageMemoryRequirementsAllocator, 1322 typename Dispatch, 1323 typename std::enable_if< 1324 std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, 1325 int>::type> 1326 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,Dispatch const & d) const1327 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1328 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, 1329 Dispatch const & d ) const 1330 { 1331 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1332 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1333 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1334 # endif 1335 1336 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( 1337 sparseImageMemoryRequirementsAllocator ); 1338 uint32_t sparseMemoryRequirementCount; 1339 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1340 sparseMemoryRequirements.resize( 
sparseMemoryRequirementCount ); 1341 d.vkGetImageSparseMemoryRequirements( m_device, 1342 static_cast<VkImage>( image ), 1343 &sparseMemoryRequirementCount, 1344 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1345 1346 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1347 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1348 { 1349 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1350 } 1351 return sparseMemoryRequirements; 1352 } 1353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1354 1355 template <typename Dispatch> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,Dispatch const & d) const1356 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1357 VULKAN_HPP_NAMESPACE::ImageType type, 1358 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1359 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1360 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1361 uint32_t * pPropertyCount, 1362 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, 1363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1364 { 1365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1366 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1367 static_cast<VkFormat>( format ), 1368 static_cast<VkImageType>( type ), 1369 static_cast<VkSampleCountFlagBits>( samples ), 1370 static_cast<VkImageUsageFlags>( usage ), 1371 static_cast<VkImageTiling>( tiling ), 1372 pPropertyCount, 1373 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); 1374 } 1375 1376 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1377 template 
<typename SparseImageFormatPropertiesAllocator, typename Dispatch> 1378 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,Dispatch const & d) const1379 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1380 VULKAN_HPP_NAMESPACE::ImageType type, 1381 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1382 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1383 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1384 Dispatch const & d ) const 1385 { 1386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1387 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1388 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1389 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1390 # endif 1391 1392 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; 1393 uint32_t propertyCount; 1394 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1395 static_cast<VkFormat>( format ), 1396 static_cast<VkImageType>( type ), 1397 static_cast<VkSampleCountFlagBits>( samples ), 1398 static_cast<VkImageUsageFlags>( usage ), 1399 static_cast<VkImageTiling>( tiling ), 1400 &propertyCount, 1401 nullptr ); 1402 properties.resize( propertyCount ); 1403 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1404 static_cast<VkFormat>( format ), 1405 static_cast<VkImageType>( type ), 1406 static_cast<VkSampleCountFlagBits>( samples ), 1407 static_cast<VkImageUsageFlags>( usage ), 1408 static_cast<VkImageTiling>( tiling ), 1409 &propertyCount, 1410 reinterpret_cast<VkSparseImageFormatProperties *>( 
properties.data() ) ); 1411 1412 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1413 if ( propertyCount < properties.size() ) 1414 { 1415 properties.resize( propertyCount ); 1416 } 1417 return properties; 1418 } 1419 1420 template < 1421 typename SparseImageFormatPropertiesAllocator, 1422 typename Dispatch, 1423 typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, 1424 int>::type> 1425 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,Dispatch const & d) const1426 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1427 VULKAN_HPP_NAMESPACE::ImageType type, 1428 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1429 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1430 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1431 SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, 1432 Dispatch const & d ) const 1433 { 1434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1435 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1436 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1437 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1438 # endif 1439 1440 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); 1441 uint32_t propertyCount; 1442 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1443 static_cast<VkFormat>( format ), 
1444 static_cast<VkImageType>( type ), 1445 static_cast<VkSampleCountFlagBits>( samples ), 1446 static_cast<VkImageUsageFlags>( usage ), 1447 static_cast<VkImageTiling>( tiling ), 1448 &propertyCount, 1449 nullptr ); 1450 properties.resize( propertyCount ); 1451 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1452 static_cast<VkFormat>( format ), 1453 static_cast<VkImageType>( type ), 1454 static_cast<VkSampleCountFlagBits>( samples ), 1455 static_cast<VkImageUsageFlags>( usage ), 1456 static_cast<VkImageTiling>( tiling ), 1457 &propertyCount, 1458 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1459 1460 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1461 if ( propertyCount < properties.size() ) 1462 { 1463 properties.resize( propertyCount ); 1464 } 1465 return properties; 1466 } 1467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1468 1469 template <typename Dispatch> bindSparse(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, 1471 const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, 1472 VULKAN_HPP_NAMESPACE::Fence fence, 1473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1474 { 1475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1476 return static_cast<Result>( 1477 d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); 1478 } 1479 1480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1481 template <typename Dispatch> bindSparse(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1482 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( 1483 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1484 { 1485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1486 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1487 VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" ); 1488 # endif 1489 1490 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1491 d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) ); 1492 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); 1493 1494 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1495 } 1496 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1497 1498 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const1499 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, 1500 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1501 VULKAN_HPP_NAMESPACE::Fence * pFence, 1502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1503 { 1504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1505 return static_cast<Result>( d.vkCreateFence( m_device, 1506 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), 1507 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1508 reinterpret_cast<VkFence *>( pFence ) ) ); 1509 } 1510 1511 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1512 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1513 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( 1514 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1515 { 1516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1517 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1518 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1519 # endif 1520 1521 VULKAN_HPP_NAMESPACE::Fence fence; 1522 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1523 d.vkCreateFence( m_device, 1524 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1525 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1526 reinterpret_cast<VkFence *>( &fence ) ) ); 1527 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); 1528 1529 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 1530 } 1531 1532 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1533 template <typename Dispatch> createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( 1535 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1536 { 1537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1539 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1540 # endif 1541 1542 VULKAN_HPP_NAMESPACE::Fence 
fence; 1543 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1544 d.vkCreateFence( m_device, 1545 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1546 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1547 reinterpret_cast<VkFence *>( &fence ) ) ); 1548 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); 1549 1550 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1551 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1552 } 1553 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1554 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1555 1556 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1557 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1558 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1559 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1560 { 1561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1562 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1563 } 1564 1565 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1566 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1567 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1568 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1569 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1570 { 1571 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1572 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1573 VULKAN_HPP_ASSERT( d.vkDestroyFence && 
"Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1574 # endif 1575 1576 d.vkDestroyFence( m_device, 1577 static_cast<VkFence>( fence ), 1578 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1579 } 1580 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1581 1582 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1583 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1584 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1586 { 1587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1588 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1589 } 1590 1591 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1592 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1593 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1594 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1595 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1596 { 1597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1599 VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1600 # endif 1601 1602 d.vkDestroyFence( m_device, 1603 static_cast<VkFence>( fence ), 1604 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1605 } 1606 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1607 1608 template <typename Dispatch> resetFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,Dispatch const & d) 
const1609 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, 1610 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1611 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1612 { 1613 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1614 return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); 1615 } 1616 1617 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1618 template <typename Dispatch> 1619 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,Dispatch const & d) const1620 Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const 1621 { 1622 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1623 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1624 VULKAN_HPP_ASSERT( d.vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" ); 1625 # endif 1626 1627 VULKAN_HPP_NAMESPACE::Result result = 1628 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) ); 1629 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); 1630 1631 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1632 } 1633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1634 1635 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1636 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1637 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1638 { 1639 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1640 return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1641 } 1642 
#else 1643 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1644 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1645 { 1646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1647 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1648 VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" ); 1649 # endif 1650 1651 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1652 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1653 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1654 1655 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1656 } 1657 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1658 1659 template <typename Dispatch> waitForFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1660 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, 1661 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1662 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1663 uint64_t timeout, 1664 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1665 { 1666 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1667 return static_cast<Result>( 1668 d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); 1669 } 1670 1671 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1672 template <typename Dispatch> 1673 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & 
fences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1674 Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, 1675 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1676 uint64_t timeout, 1677 Dispatch const & d ) const 1678 { 1679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1680 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1681 VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" ); 1682 # endif 1683 1684 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1685 d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) ); 1686 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1687 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 1688 1689 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1690 } 1691 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1692 1693 template <typename Dispatch> createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,Dispatch const & d) const1694 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, 1695 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1696 VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, 1697 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1698 { 1699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1700 return static_cast<Result>( d.vkCreateSemaphore( m_device, 1701 reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), 1702 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1703 reinterpret_cast<VkSemaphore *>( pSemaphore ) 
) ); 1704 } 1705 1706 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1707 template <typename Dispatch> 1708 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1709 Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1710 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1711 Dispatch const & d ) const 1712 { 1713 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1714 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1715 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1716 # endif 1717 1718 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1719 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1720 d.vkCreateSemaphore( m_device, 1721 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1722 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1723 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1724 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); 1725 1726 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); 1727 } 1728 1729 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1730 template <typename Dispatch> 1731 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1732 Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1733 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1734 Dispatch const & d ) const 1735 { 1736 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1737 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1738 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1739 # endif 1740 1741 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1742 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1743 d.vkCreateSemaphore( m_device, 1744 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1745 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1746 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1747 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); 1748 1749 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1750 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1751 } 1752 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1753 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1754 1755 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1756 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1757 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1759 { 1760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1761 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1762 } 1763 1764 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1765 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1766 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1767 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1768 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1769 { 1770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1771 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1772 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1773 # endif 1774 1775 d.vkDestroySemaphore( m_device, 1776 static_cast<VkSemaphore>( semaphore ), 1777 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1778 } 1779 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1780 1781 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1782 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1783 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1784 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1785 { 1786 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1787 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1788 } 1789 1790 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1791 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1792 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1793 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1794 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1795 { 1796 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1797 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1798 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1799 # endif 1800 1801 d.vkDestroySemaphore( m_device, 1802 static_cast<VkSemaphore>( semaphore ), 1803 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1804 } 1805 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1806 1807 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Event * pEvent,Dispatch const & d) const1808 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, 1809 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1810 VULKAN_HPP_NAMESPACE::Event * pEvent, 1811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1812 { 1813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1814 return static_cast<Result>( d.vkCreateEvent( m_device, 1815 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), 1816 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1817 reinterpret_cast<VkEvent *>( pEvent ) ) ); 1818 } 1819 1820 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1821 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1822 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( 1823 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1824 { 1825 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1826 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1827 VULKAN_HPP_ASSERT( d.vkCreateEvent && 
"Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1828 # endif 1829 1830 VULKAN_HPP_NAMESPACE::Event event; 1831 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1832 d.vkCreateEvent( m_device, 1833 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1834 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1835 reinterpret_cast<VkEvent *>( &event ) ) ); 1836 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); 1837 1838 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); 1839 } 1840 1841 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1842 template <typename Dispatch> createEventUnique(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1843 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( 1844 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1845 { 1846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1847 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1848 VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1849 # endif 1850 1851 VULKAN_HPP_NAMESPACE::Event event; 1852 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1853 d.vkCreateEvent( m_device, 1854 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1855 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1856 reinterpret_cast<VkEvent *>( &event ) ) ); 1857 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createEventUnique" ); 1858 1859 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1860 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1861 } 1862 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1864 1865 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1866 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1867 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1869 { 1870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1871 d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1872 } 1873 1874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1875 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1876 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1877 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1879 { 1880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1882 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1883 # endif 1884 1885 d.vkDestroyEvent( m_device, 1886 static_cast<VkEvent>( event ), 1887 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1888 } 1889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1890 1891 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1892 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1893 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1895 { 1896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1897 d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1898 } 1899 1900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1901 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1902 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1903 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1905 { 1906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1908 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1909 # endif 1910 1911 d.vkDestroyEvent( m_device, 1912 static_cast<VkEvent>( event ), 1913 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1914 } 1915 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1916 1917 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1918 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1919 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1920 { 1921 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1922 return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1923 } 1924 #else 1925 template <typename Dispatch> 
getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1926 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1927 { 1928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1929 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1930 VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" ); 1931 # endif 1932 1933 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1934 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1935 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); 1936 1937 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1938 } 1939 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1940 1941 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1942 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1943 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1944 { 1945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1946 return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1947 } 1948 #else 1949 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1950 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, 1951 Dispatch const & d ) const 1952 { 1953 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1954 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1955 VULKAN_HPP_ASSERT( d.vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" ); 1956 # endif 1957 1958 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1959 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); 1960 1961 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1962 } 1963 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1964 1965 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1966 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1967 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1968 { 1969 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1970 return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1971 } 1972 #else 1973 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1974 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1975 { 1976 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1977 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1978 VULKAN_HPP_ASSERT( d.vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" ); 1979 # endif 1980 1981 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1982 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); 1983 1984 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1985 } 1986 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1987 1988 template <typename Dispatch> createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,Dispatch 
const & d) const1989 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, 1990 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1991 VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, 1992 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1993 { 1994 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1995 return static_cast<Result>( d.vkCreateQueryPool( m_device, 1996 reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), 1997 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1998 reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); 1999 } 2000 2001 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2002 template <typename Dispatch> 2003 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2004 Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2005 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2006 Dispatch const & d ) const 2007 { 2008 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2009 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2010 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2011 # endif 2012 2013 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2014 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2015 d.vkCreateQueryPool( m_device, 2016 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2017 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2018 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2019 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createQueryPool" ); 2020 2021 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); 2022 } 2023 2024 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2025 template <typename Dispatch> 2026 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2027 Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2028 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2029 Dispatch const & d ) const 2030 { 2031 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2032 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2033 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2034 # endif 2035 2036 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2037 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2038 d.vkCreateQueryPool( m_device, 2039 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2040 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2041 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2042 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); 2043 2044 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2045 result, UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2046 } 2047 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2048 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2049 2050 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2051 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2052 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2053 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2054 { 2055 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2056 d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2057 } 2058 2059 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2060 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2061 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2062 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2063 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2064 { 2065 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2066 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2067 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2068 # endif 2069 2070 d.vkDestroyQueryPool( m_device, 2071 static_cast<VkQueryPool>( queryPool ), 2072 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2073 } 2074 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2075 2076 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2077 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2078 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2080 { 2081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2082 
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2083 } 2084 2085 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2086 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2087 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2088 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2090 { 2091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2092 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2093 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2094 # endif 2095 2096 d.vkDestroyQueryPool( m_device, 2097 static_cast<VkQueryPool>( queryPool ), 2098 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2099 } 2100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2101 2102 template <typename Dispatch> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2104 uint32_t firstQuery, 2105 uint32_t queryCount, 2106 size_t dataSize, 2107 void * pData, 2108 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2109 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2110 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2111 { 2112 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2113 return static_cast<Result>( d.vkGetQueryPoolResults( m_device, 2114 static_cast<VkQueryPool>( queryPool ), 2115 firstQuery, 2116 queryCount, 2117 
dataSize, 2118 pData, 2119 static_cast<VkDeviceSize>( stride ), 2120 static_cast<VkQueryResultFlags>( flags ) ) ); 2121 } 2122 2123 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2124 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 2125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2126 Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2127 uint32_t firstQuery, 2128 uint32_t queryCount, 2129 size_t dataSize, 2130 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2131 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2132 Dispatch const & d ) const 2133 { 2134 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2135 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2136 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2137 # endif 2138 2139 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 2140 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 2141 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2142 static_cast<VkQueryPool>( queryPool ), 2143 firstQuery, 2144 queryCount, 2145 data.size() * sizeof( DataType ), 2146 reinterpret_cast<void *>( data.data() ), 2147 static_cast<VkDeviceSize>( stride ), 2148 static_cast<VkQueryResultFlags>( flags ) ) ); 2149 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 2150 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", 2151 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2152 2153 return ResultValue<std::vector<DataType, DataTypeAllocator>>( result, std::move( data ) ); 2154 } 2155 2156 template <typename 
DataType, typename Dispatch> getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2157 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2158 uint32_t firstQuery, 2159 uint32_t queryCount, 2160 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2161 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2162 Dispatch const & d ) const 2163 { 2164 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2165 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2166 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2167 # endif 2168 2169 DataType data; 2170 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2171 static_cast<VkQueryPool>( queryPool ), 2172 firstQuery, 2173 queryCount, 2174 sizeof( DataType ), 2175 reinterpret_cast<void *>( &data ), 2176 static_cast<VkDeviceSize>( stride ), 2177 static_cast<VkQueryResultFlags>( flags ) ) ); 2178 VULKAN_HPP_NAMESPACE::detail::resultCheck( 2179 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2180 2181 return ResultValue<DataType>( result, std::move( data ) ); 2182 } 2183 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2184 2185 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Buffer * pBuffer,Dispatch const & d) const2186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, 2187 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2188 
VULKAN_HPP_NAMESPACE::Buffer * pBuffer, 2189 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2190 { 2191 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2192 return static_cast<Result>( d.vkCreateBuffer( m_device, 2193 reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), 2194 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2195 reinterpret_cast<VkBuffer *>( pBuffer ) ) ); 2196 } 2197 2198 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2199 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2200 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( 2201 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2202 { 2203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2205 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2206 # endif 2207 2208 VULKAN_HPP_NAMESPACE::Buffer buffer; 2209 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2210 d.vkCreateBuffer( m_device, 2211 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2212 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2213 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2214 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); 2215 2216 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 2217 } 2218 2219 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2220 template <typename Dispatch> createBufferUnique(const VULKAN_HPP_NAMESPACE::BufferCreateInfo & 
createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2221 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( 2222 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2223 { 2224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2225 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2226 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2227 # endif 2228 2229 VULKAN_HPP_NAMESPACE::Buffer buffer; 2230 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2231 d.vkCreateBuffer( m_device, 2232 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2233 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2234 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2235 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); 2236 2237 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2238 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2239 } 2240 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2241 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2242 2243 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2244 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2245 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2246 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2247 { 2248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2249 d.vkDestroyBuffer( 
m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2250 } 2251 2252 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2253 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2254 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2255 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2256 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2257 { 2258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2259 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2260 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2261 # endif 2262 2263 d.vkDestroyBuffer( m_device, 2264 static_cast<VkBuffer>( buffer ), 2265 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2266 } 2267 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2268 2269 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2270 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2271 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2273 { 2274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2275 d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2276 } 2277 2278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2279 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2280 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2281 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2282 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2283 { 2284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2285 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2286 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2287 # endif 2288 2289 d.vkDestroyBuffer( m_device, 2290 static_cast<VkBuffer>( buffer ), 2291 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2292 } 2293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2294 2295 template <typename Dispatch> createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferView * pView,Dispatch const & d) const2296 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, 2297 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2298 VULKAN_HPP_NAMESPACE::BufferView * pView, 2299 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2300 { 2301 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2302 return static_cast<Result>( d.vkCreateBufferView( m_device, 2303 reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), 2304 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2305 reinterpret_cast<VkBufferView *>( pView ) ) ); 2306 } 2307 2308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2309 template <typename Dispatch> 2310 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2311 Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2312 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2313 Dispatch const & d ) const 2314 { 2315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2316 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2317 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2318 # endif 2319 2320 VULKAN_HPP_NAMESPACE::BufferView view; 2321 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2322 d.vkCreateBufferView( m_device, 2323 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2324 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2325 reinterpret_cast<VkBufferView *>( &view ) ) ); 2326 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); 2327 2328 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); 2329 } 2330 2331 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2332 template <typename Dispatch> 2333 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2334 Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2335 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2336 Dispatch const & d ) const 2337 { 2338 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2339 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2340 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2341 # endif 2342 2343 VULKAN_HPP_NAMESPACE::BufferView view; 2344 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2345 
d.vkCreateBufferView( m_device, 2346 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2347 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2348 reinterpret_cast<VkBufferView *>( &view ) ) ); 2349 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); 2350 2351 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2352 result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2353 } 2354 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2355 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2356 2357 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2358 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2359 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2360 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2361 { 2362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2363 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2364 } 2365 2366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2367 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2368 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2369 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2370 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2371 { 2372 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2373 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2374 VULKAN_HPP_ASSERT( d.vkDestroyBufferView 
&& "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2375 # endif 2376 2377 d.vkDestroyBufferView( m_device, 2378 static_cast<VkBufferView>( bufferView ), 2379 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2380 } 2381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2382 2383 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2384 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2385 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2386 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2387 { 2388 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2389 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2390 } 2391 2392 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2393 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2394 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2395 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2396 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2397 { 2398 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2399 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2400 VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2401 # endif 2402 2403 d.vkDestroyBufferView( m_device, 2404 static_cast<VkBufferView>( bufferView ), 2405 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2406 } 2407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2408 2409 template <typename Dispatch> 
createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Image * pImage,Dispatch const & d) const2410 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, 2411 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2412 VULKAN_HPP_NAMESPACE::Image * pImage, 2413 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2414 { 2415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2416 return static_cast<Result>( d.vkCreateImage( m_device, 2417 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), 2418 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2419 reinterpret_cast<VkImage *>( pImage ) ) ); 2420 } 2421 2422 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2423 template <typename Dispatch> createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2424 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( 2425 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2426 { 2427 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2428 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2429 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2430 # endif 2431 2432 VULKAN_HPP_NAMESPACE::Image image; 2433 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2434 d.vkCreateImage( m_device, 2435 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2436 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2437 reinterpret_cast<VkImage *>( &image ) ) ); 
2438 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); 2439 2440 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); 2441 } 2442 2443 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2444 template <typename Dispatch> createImageUnique(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2445 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( 2446 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2447 { 2448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2449 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2450 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2451 # endif 2452 2453 VULKAN_HPP_NAMESPACE::Image image; 2454 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2455 d.vkCreateImage( m_device, 2456 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2457 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2458 reinterpret_cast<VkImage *>( &image ) ) ); 2459 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); 2460 2461 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2462 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2463 } 2464 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2465 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2466 2467 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,Dispatch const & d) const2468 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2469 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2470 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2471 { 2472 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2473 d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2474 } 2475 2476 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2477 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2478 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2479 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2480 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2481 { 2482 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2483 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2484 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2485 # endif 2486 2487 d.vkDestroyImage( m_device, 2488 static_cast<VkImage>( image ), 2489 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2490 } 2491 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2492 2493 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2494 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2495 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2496 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2497 { 2498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2499 d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2500 } 2501 2502 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2503 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2504 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2505 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2507 { 2508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2509 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2510 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2511 # endif 2512 2513 d.vkDestroyImage( m_device, 2514 static_cast<VkImage>( image ), 2515 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2516 } 2517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2518 2519 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,Dispatch const & d) const2520 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2521 const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, 2522 VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, 2523 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2524 { 2525 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2526 d.vkGetImageSubresourceLayout( m_device, 2527 static_cast<VkImage>( image ), 2528 reinterpret_cast<const VkImageSubresource *>( pSubresource ), 2529 reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); 2530 } 2531 2532 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2533 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,Dispatch const & d) const2534 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( 2535 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2536 { 2537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2539 VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" ); 2540 # endif 2541 2542 VULKAN_HPP_NAMESPACE::SubresourceLayout layout; 2543 d.vkGetImageSubresourceLayout( m_device, 2544 static_cast<VkImage>( image ), 2545 reinterpret_cast<const VkImageSubresource *>( &subresource ), 2546 reinterpret_cast<VkSubresourceLayout *>( &layout ) ); 2547 2548 return layout; 2549 } 2550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2551 2552 template <typename Dispatch> createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ImageView * pView,Dispatch const & d) const2553 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, 2554 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2555 VULKAN_HPP_NAMESPACE::ImageView * pView, 2556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2557 { 2558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2559 return static_cast<Result>( d.vkCreateImageView( m_device, 2560 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), 2561 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2562 reinterpret_cast<VkImageView *>( pView ) ) ); 2563 } 2564 2565 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2566 template <typename Dispatch> 2567 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView(const 
  // Enhanced-mode createImageView (definition continued from the previous chunk):
  // returns the created ImageView by value; failure handling is delegated to
  // detail::resultCheck / detail::createResultValueType (throws or returns a
  // ResultValue depending on the exception configuration).
  Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                           Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::ImageView view;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like createImageView, but wraps the new handle in a UniqueHandle that destroys
  // it (with the same allocator and dispatcher) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
    Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
#    endif

    VULKAN_HPP_NAMESPACE::ImageView view;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style destroyImageView: thin, noexcept pass-through to vkDestroyImageView
  // taking raw pointers; no result to return.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroyImageView: takes an Optional allocator wrapper instead of
  // a raw pointer; otherwise identical to the C-style overload above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy overload for ImageView; same body as destroyImageView (raw
  // pointer variant), dispatched by the handle type.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for ImageView, Optional-allocator variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style createShaderModule: raw-pointer wrapper over vkCreateShaderModule;
  // returns the VkResult unchanged.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                            VULKAN_HPP_NAMESPACE::ShaderModule *                 pShaderModule,
                                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateShaderModule( m_device,
                                                        reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode createShaderModule: returns the ShaderModule by value after
  // result checking (definition continues in the next chunk).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
    Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // createShaderModuleUnique: as createShaderModule, but returns a UniqueHandle
  // that destroys the module (with the same allocator and dispatcher) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
    Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
#    endif

    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style destroyShaderModule: thin, noexcept pass-through to vkDestroyShaderModule.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroyShaderModule: Optional-allocator variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy overload for ShaderModule; same body as destroyShaderModule.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for ShaderModule, Optional-allocator variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style createPipelineCache (definition continues in the next chunk).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD
  VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                        VULKAN_HPP_NAMESPACE::PipelineCache *                 pPipelineCache,
                                                        Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePipelineCache( m_device,
                                                         reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode createPipelineCache: returns the PipelineCache by value after
  // result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
    Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VULKAN_HPP_NAMESPACE::Result        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // createPipelineCacheUnique: UniqueHandle-returning variant; the handle is
  // destroyed with the same allocator and dispatcher on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
    Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
#    endif

    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VULKAN_HPP_NAMESPACE::Result        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style destroyPipelineCache: thin, noexcept pass-through to vkDestroyPipelineCache.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroyPipelineCache: Optional-allocator variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                       Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy overload for PipelineCache; same body as destroyPipelineCache.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for PipelineCache, Optional-allocator variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style getPipelineCacheData: single call into vkGetPipelineCacheData; the
  // two-call size-query protocol is the caller's responsibility here.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                                                              size_t *                            pDataSize,
                                                                              void *                              pData,
                                                                              Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode getPipelineCacheData: queries the required size, then fetches
  // the cache blob, looping while the driver reports eIncomplete (the size can
  // change between the two calls).
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      // First call with a null data pointer asks only for the required size.
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    if ( dataSize < data.size() )
    {
      // Shrink to the number of bytes actually written.
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Allocator-aware overload of getPipelineCacheData: identical logic, with a
  // caller-supplied allocator for the returned vector.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style mergePipelineCaches: merges srcCacheCount source caches into dstCache;
  // returns the raw VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache         dstCache,
                                 uint32_t                                    srcCacheCount,
                                 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
                                 Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode mergePipelineCaches: the ArrayProxy carries both the source
  // caches and their count.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache                                                 dstCache,
                                 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
                                 Dispatch const &                                                                    d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergePipelineCaches(
      m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  // C-style createGraphicsPipelines (declaration continued from the previous
  // chunk): batch-creates createInfoCount pipelines; returns the raw VkResult.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                                                                 uint32_t                                                 createInfoCount,
                                                                                 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::Pipeline *                         pPipelines,
                                                                                 Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
                                                             static_cast<VkPipelineCache>( pipelineCache ),
                                                             createInfoCount,
                                                             reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode createGraphicsPipelines: returns a ResultValue (not
  // ResultValueType) because ePipelineCompileRequiredEXT is accepted as a
  // non-success code by the resultCheck below.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                                          pipelineCache,
                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                     Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Allocator-aware overload: identical logic, with a caller-supplied allocator
  // for the returned pipeline vector.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                                          pipelineCache,
                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                     PipelineAllocator &                                                                          pipelineAllocator,
                                     Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Single-pipeline convenience wrapper: forwards to vkCreateGraphicsPipelines
  // with a createInfoCount of 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                    const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &  createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // createGraphicsPipelinesUnique: batch creation returning UniqueHandles
  // (definition continues in the next chunk).
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                          pipelineCache,
                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                           Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() ==
    VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#    endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // Wrap each raw pipeline in a UniqueHandle; all handles share one
    // ObjectDestroy deleter built from this device, allocator and dispatcher.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Allocator-aware overload of createGraphicsPipelinesUnique: identical logic,
  // with a caller-supplied allocator for the returned handle vector.
  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                          pipelineCache,
                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                           PipelineAllocator &                                                                          pipelineAllocator,
                                           Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#    endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Single-pipeline UniqueHandle wrapper (definition continues in the next chunk).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &  createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
#    endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT
} ); 3199 3200 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3201 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3202 } 3203 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3204 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3205 3206 template <typename Dispatch> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const3207 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3208 uint32_t createInfoCount, 3209 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, 3210 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3211 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 3212 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3213 { 3214 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3215 return static_cast<Result>( d.vkCreateComputePipelines( m_device, 3216 static_cast<VkPipelineCache>( pipelineCache ), 3217 createInfoCount, 3218 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), 3219 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3220 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 3221 } 3222 3223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3224 template <typename PipelineAllocator, typename Dispatch> 3225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch 
Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    // One pipeline per create-info; the vector is sized up front so the C entry point can write
    // the raw handles in place.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    // ePipelineCompileRequiredEXT is an accepted partial-success code, so the raw result travels
    // back inside the ResultValue instead of being converted into an exception.
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Overload taking a caller-provided allocator instance for the result vector; SFINAE restricts
  // it to allocators whose value_type is Pipeline.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    PipelineAllocator & pipelineAllocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    // The result vector is constructed with the caller's pipelineAllocator.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Single compute pipeline: count is hard-wired to 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                   const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    // Single-pipeline convenience wrapper over the array entry point vkCreateComputePipelines.
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    // ePipelineCompileRequiredEXT is an accepted partial-success code here.
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Batch compute-pipeline creation returning owning UniqueHandles; each handle destroys its
  // pipeline via ObjectDestroy with the same allocator when it goes out of scope.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // Wrap the raw handles only after the result check; all handles share one deleter state.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Overload taking a caller-provided allocator instance for the vector of UniqueHandles; SFINAE
  // restricts it to allocators whose value_type is UniqueHandle<Pipeline, Dispatch>.
  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          PipelineAllocator & pipelineAllocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // The vector of UniqueHandles is constructed with the caller's pipelineAllocator.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Single compute pipeline wrapped in a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw pointer-based destroy: noexcept pass-through to vkDestroyPipeline.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy: takes an Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipeline( m_device,
                         static_cast<VkPipeline>( pipeline ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Type-dispatched destroy overload for Pipeline (same behavior as destroyPipeline).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipeline( m_device,
                         static_cast<VkPipeline>( pipeline ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Raw pointer-based variant: noexcept pass-through to vkCreatePipelineLayout.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
                                                          reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: checks the result (resultCheck) and returns the layout by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
    Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the returned layout destroys itself (ObjectDestroy) with the same
  // allocator when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
    Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw pointer-based destroy: noexcept pass-through to vkDestroyPipelineLayout.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy: takes an Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Type-dispatched destroy overload for PipelineLayout (same behavior as destroyPipelineLayout).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw pointer-based variant: noexcept pass-through to vkCreateSampler.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
                                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                       VULKAN_HPP_NAMESPACE::Sampler * pSampler,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSampler( m_device,
                                                   reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                   reinterpret_cast<VkSampler *>( pSampler ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: checks the result (resultCheck) and returns the sampler by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler(
    const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::Sampler sampler;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateSampler( m_device,
                         reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkSampler *>( &sampler ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch> createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3611 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( 3612 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3613 { 3614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3615 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3616 VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); 3617 # endif 3618 3619 VULKAN_HPP_NAMESPACE::Sampler sampler; 3620 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3621 d.vkCreateSampler( m_device, 3622 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3623 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3624 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3625 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); 3626 3627 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3628 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3629 } 3630 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3631 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3632 3633 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3634 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3635 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3636 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 3637 { 3638 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3639 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3640 } 3641 3642 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3643 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3644 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3645 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3646 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3647 { 3648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3650 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3651 # endif 3652 3653 d.vkDestroySampler( m_device, 3654 static_cast<VkSampler>( sampler ), 3655 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3656 } 3657 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3658 3659 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3660 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3661 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3662 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3663 { 3664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3665 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3666 } 3667 3668 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3669 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator,Dispatch const & d) const3670 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3671 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3672 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3673 { 3674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3675 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3676 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3677 # endif 3678 3679 d.vkDestroySampler( m_device, 3680 static_cast<VkSampler>( sampler ), 3681 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3682 } 3683 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3684 3685 template <typename Dispatch> createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,Dispatch const & d) const3686 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 3687 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3688 VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, 3689 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3690 { 3691 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3692 return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, 3693 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 3694 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3695 reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); 3696 } 3697 3698 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3699 template <typename Dispatch> 3700 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout(const 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3701 Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3702 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3703 Dispatch const & d ) const 3704 { 3705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3707 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3708 # endif 3709 3710 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3711 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3712 m_device, 3713 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3714 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3715 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3716 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); 3717 3718 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); 3719 } 3720 3721 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3722 template <typename Dispatch> 3723 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3724 Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3725 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3726 Dispatch const & d ) const 
3727 { 3728 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3729 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3730 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3731 # endif 3732 3733 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3734 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3735 m_device, 3736 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3737 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3738 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3739 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); 3740 3741 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3742 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3743 } 3744 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3746 3747 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3748 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3749 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3750 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3751 { 3752 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3753 d.vkDestroyDescriptorSetLayout( 3754 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3755 } 3756 3757 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3758 template <typename 
Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3759 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3760 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3761 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3762 { 3763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3764 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3765 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3766 # endif 3767 3768 d.vkDestroyDescriptorSetLayout( 3769 m_device, 3770 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3771 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3772 } 3773 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3774 3775 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3776 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3777 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3778 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3779 { 3780 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3781 d.vkDestroyDescriptorSetLayout( 3782 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3783 } 3784 3785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3786 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3787 VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3788 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3789 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3790 { 3791 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3792 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3793 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3794 # endif 3795 3796 d.vkDestroyDescriptorSetLayout( 3797 m_device, 3798 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3799 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3800 } 3801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3802 3803 template <typename Dispatch> createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,Dispatch const & d) const3804 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, 3805 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3806 VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, 3807 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3808 { 3809 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3810 return static_cast<Result>( d.vkCreateDescriptorPool( m_device, 3811 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), 3812 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3813 reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); 3814 } 3815 3816 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3817 template <typename Dispatch> 3818 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool(const 
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3819 Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3820 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3821 Dispatch const & d ) const 3822 { 3823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3824 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3825 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); 3826 # endif 3827 3828 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3829 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3830 d.vkCreateDescriptorPool( m_device, 3831 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3832 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3833 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3834 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); 3835 3836 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); 3837 } 3838 3839 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3840 template <typename Dispatch> 3841 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3842 Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3843 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3844 Dispatch const & d ) const 3845 { 3846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 3847 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3848 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); 3849 # endif 3850 3851 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3852 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3853 d.vkCreateDescriptorPool( m_device, 3854 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3855 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3856 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3857 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); 3858 3859 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3860 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3861 } 3862 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3864 3865 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3866 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3867 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3869 { 3870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3871 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3872 } 3873 3874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3875 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3876 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3877 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3879 { 3880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3882 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3883 # endif 3884 3885 d.vkDestroyDescriptorPool( m_device, 3886 static_cast<VkDescriptorPool>( descriptorPool ), 3887 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3888 } 3889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3890 3891 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3892 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3893 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3895 { 3896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3897 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3898 } 3899 3900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3901 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3902 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3903 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3905 { 3906 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3908 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3909 # endif 3910 3911 d.vkDestroyDescriptorPool( m_device, 3912 static_cast<VkDescriptorPool>( descriptorPool ), 3913 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3914 } 3915 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3916 3917 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3918 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3919 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3920 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3921 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3922 { 3923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3924 return static_cast<Result>( 3925 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3926 } 3927 #else 3928 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3929 VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3930 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3931 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3932 { 3933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3934 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3935 VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" ); 3936 # endif 3937 3938 d.vkResetDescriptorPool( m_device, 
static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ); 3939 } 3940 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3941 3942 template <typename Dispatch> allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3943 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, 3944 VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3945 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3946 { 3947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3948 return static_cast<Result>( d.vkAllocateDescriptorSets( 3949 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); 3950 } 3951 3952 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3953 template <typename DescriptorSetAllocator, typename Dispatch> 3954 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3955 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3956 { 3957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3958 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3959 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 3960 # endif 3961 3962 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); 3963 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3964 m_device, reinterpret_cast<const 
VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3965 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3966 3967 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); 3968 } 3969 3970 template <typename DescriptorSetAllocator, 3971 typename Dispatch, 3972 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type> 3973 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3974 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 3975 DescriptorSetAllocator & descriptorSetAllocator, 3976 Dispatch const & d ) const 3977 { 3978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3979 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3980 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 3981 # endif 3982 3983 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); 3984 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3985 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3986 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3987 3988 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) 
); 3989 } 3990 3991 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3992 template <typename Dispatch, typename DescriptorSetAllocator> 3993 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 3994 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3995 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3996 { 3997 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3998 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3999 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4000 # endif 4001 4002 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4003 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4004 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4005 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4006 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; 4007 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 4008 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4009 for ( auto const & descriptorSet : descriptorSets ) 4010 { 4011 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 4012 } 4013 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4014 } 4015 4016 template < 4017 typename Dispatch, 4018 typename 
DescriptorSetAllocator, 4019 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, 4020 int>::type> 4021 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4022 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const4023 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 4024 DescriptorSetAllocator & descriptorSetAllocator, 4025 Dispatch const & d ) const 4026 { 4027 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4028 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4029 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4030 # endif 4031 4032 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4033 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4034 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4035 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4036 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); 4037 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 4038 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4039 for ( auto const & descriptorSet : descriptorSets ) 4040 { 4041 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) 
); 4042 } 4043 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4044 } 4045 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4047 4048 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const4049 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4050 uint32_t descriptorSetCount, 4051 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4052 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4053 { 4054 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4055 return static_cast<Result>( d.vkFreeDescriptorSets( 4056 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4057 } 4058 4059 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4060 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,Dispatch const & d) const4061 VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4062 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4063 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4064 { 4065 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4066 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4067 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4068 # endif 4069 4070 d.vkFreeDescriptorSets( 4071 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 
4072 } 4073 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4074 4075 template <typename Dispatch> Result(Device::free)4076 VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4077 uint32_t descriptorSetCount, 4078 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4080 { 4081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4082 return static_cast<Result>( d.vkFreeDescriptorSets( 4083 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4084 } 4085 4086 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4087 template <typename Dispatch> 4088 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4089 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4090 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4091 { 4092 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4093 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4094 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4095 # endif 4096 4097 d.vkFreeDescriptorSets( 4098 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 4099 } 4100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4101 4102 template <typename Dispatch> updateDescriptorSets(uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,Dispatch const & d) const4103 VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, 4104 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 4105 uint32_t descriptorCopyCount, 4106 const 
VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, 4107 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4108 { 4109 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4110 d.vkUpdateDescriptorSets( m_device, 4111 descriptorWriteCount, 4112 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), 4113 descriptorCopyCount, 4114 reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) ); 4115 } 4116 4117 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4118 template <typename Dispatch> 4119 VULKAN_HPP_INLINE void updateDescriptorSets(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,Dispatch const & d) const4120 Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 4121 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, 4122 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4123 { 4124 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4125 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4126 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" ); 4127 # endif 4128 4129 d.vkUpdateDescriptorSets( m_device, 4130 descriptorWrites.size(), 4131 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), 4132 descriptorCopies.size(), 4133 reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) ); 4134 } 4135 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4136 4137 template <typename Dispatch> createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,Dispatch const & d) const4138 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, 4139 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4140 VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, 4141 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4142 { 4143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4144 return static_cast<Result>( d.vkCreateFramebuffer( m_device, 4145 reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), 4146 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4147 reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); 4148 } 4149 4150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4151 template <typename Dispatch> 4152 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4153 Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4154 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4155 Dispatch const & d ) const 4156 { 4157 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4158 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4159 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4160 # endif 4161 4162 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4163 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4164 d.vkCreateFramebuffer( m_device, 4165 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4166 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4167 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4168 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createFramebuffer" ); 4169 4170 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); 4171 } 4172 4173 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4174 template <typename Dispatch> 4175 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4176 Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4177 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4178 Dispatch const & d ) const 4179 { 4180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4182 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4183 # endif 4184 4185 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4186 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4187 d.vkCreateFramebuffer( m_device, 4188 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4189 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4190 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4191 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); 4192 4193 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4194 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4195 } 4196 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4198 4199 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer 
framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4200 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4201 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4202 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4203 { 4204 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4205 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4206 } 4207 4208 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4209 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4210 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4211 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4212 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4213 { 4214 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4215 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4216 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4217 # endif 4218 4219 d.vkDestroyFramebuffer( m_device, 4220 static_cast<VkFramebuffer>( framebuffer ), 4221 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4222 } 4223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4224 4225 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4226 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4227 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4228 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4229 { 4230 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4231 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4232 } 4233 4234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4235 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4236 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4237 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4238 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4239 { 4240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4241 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4242 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4243 # endif 4244 4245 d.vkDestroyFramebuffer( m_device, 4246 static_cast<VkFramebuffer>( framebuffer ), 4247 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4248 } 4249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4250 4251 template <typename Dispatch> createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const4252 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, 4253 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4254 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 4255 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4256 { 4257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4258 return static_cast<Result>( d.vkCreateRenderPass( m_device, 4259 reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), 4260 reinterpret_cast<const 
VkAllocationCallbacks *>( pAllocator ), 4261 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 4262 } 4263 4264 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4265 template <typename Dispatch> 4266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4267 Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4268 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4269 Dispatch const & d ) const 4270 { 4271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4272 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4273 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4274 # endif 4275 4276 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4277 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4278 d.vkCreateRenderPass( m_device, 4279 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4280 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4281 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4282 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); 4283 4284 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 4285 } 4286 4287 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4288 template <typename Dispatch> 4289 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4290 
Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4291 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4292 Dispatch const & d ) const 4293 { 4294 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4295 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4296 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4297 # endif 4298 4299 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4300 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4301 d.vkCreateRenderPass( m_device, 4302 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4303 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4304 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4305 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); 4306 4307 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4308 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4309 } 4310 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4311 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4312 4313 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4314 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4315 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4316 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4317 { 4318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4319 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4320 } 4321 4322 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 4323 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4324 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4325 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4326 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4327 { 4328 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4329 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4330 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4331 # endif 4332 4333 d.vkDestroyRenderPass( m_device, 4334 static_cast<VkRenderPass>( renderPass ), 4335 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4336 } 4337 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4338 4339 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4340 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4341 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4342 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4343 { 4344 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4345 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4346 } 4347 4348 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4349 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4350 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4351 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4352 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4353 { 4354 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4355 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4356 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4357 # endif 4358 4359 d.vkDestroyRenderPass( m_device, 4360 static_cast<VkRenderPass>( renderPass ), 4361 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4362 } 4363 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4364 4365 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const4366 VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4367 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 4368 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4369 { 4370 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4371 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 4372 } 4373 4374 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4375 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Dispatch const & d) const4376 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4377 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4378 { 4379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4380 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4381 VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" ); 4382 # endif 4383 4384 VULKAN_HPP_NAMESPACE::Extent2D granularity; 4385 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), 
reinterpret_cast<VkExtent2D *>( &granularity ) ); 4386 4387 return granularity; 4388 } 4389 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4390 4391 template <typename Dispatch> createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,Dispatch const & d) const4392 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, 4393 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4394 VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, 4395 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4396 { 4397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4398 return static_cast<Result>( d.vkCreateCommandPool( m_device, 4399 reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), 4400 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4401 reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); 4402 } 4403 4404 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4405 template <typename Dispatch> 4406 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4407 Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, 4408 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4409 Dispatch const & d ) const 4410 { 4411 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4412 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4413 VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); 4414 # endif 4415 4416 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 4417 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 4418 d.vkCreateCommandPool( m_device, 4419 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 4420 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4421 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 4422 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); 4423 4424 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); 4425 } 4426 4427 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4428 template <typename Dispatch> 4429 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4430 Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, 4431 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4432 Dispatch const & d ) const 4433 { 4434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4435 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4436 VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); 4437 # endif 4438 4439 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 4440 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4441 d.vkCreateCommandPool( m_device, 4442 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 4443 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4444 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 4445 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" 
); 4446 4447 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4448 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4449 } 4450 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4451 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4452 4453 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4454 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4455 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4456 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4457 { 4458 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4459 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4460 } 4461 4462 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4463 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4464 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4465 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4467 { 4468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4469 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4470 VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); 4471 # endif 4472 4473 d.vkDestroyCommandPool( m_device, 4474 static_cast<VkCommandPool>( commandPool ), 4475 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4476 } 4477 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4478 4479 template 
<typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4480 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4481 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4483 { 4484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4485 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4486 } 4487 4488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4489 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4490 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4491 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4492 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4493 { 4494 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4495 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4496 VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); 4497 # endif 4498 4499 d.vkDestroyCommandPool( m_device, 4500 static_cast<VkCommandPool>( commandPool ), 4501 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4502 } 4503 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4504 4505 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4506 template <typename Dispatch> resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4507 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4508 VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, 
4509 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4510 { 4511 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4512 return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4513 } 4514 #else 4515 template <typename Dispatch> 4516 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4517 Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const 4518 { 4519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4520 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4521 VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" ); 4522 # endif 4523 4524 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4525 d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4526 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); 4527 4528 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 4529 } 4530 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4531 4532 template <typename Dispatch> allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, 4534 VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4536 { 4537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4538 return 
static_cast<Result>( d.vkAllocateCommandBuffers( 4539 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); 4540 } 4541 4542 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4543 template <typename CommandBufferAllocator, typename Dispatch> 4544 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4545 Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4546 { 4547 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4548 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4549 VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); 4550 # endif 4551 4552 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); 4553 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4554 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4555 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4556 4557 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); 4558 } 4559 4560 template <typename CommandBufferAllocator, 4561 typename Dispatch, 4562 typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type> 4563 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type 
Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                CommandBufferAllocator &                                commandBufferAllocator,
                                Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    // One CommandBuffer handle per allocateInfo.commandBufferCount; the caller-supplied
    // allocator manages the result vector's storage.
    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like allocateCommandBuffers, but wraps every returned handle in a UniqueHandle whose
  // PoolFree deleter hands the buffer back to allocateInfo.commandPool on destruction.
  template <typename Dispatch, typename CommandBufferAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) );
  }

  // Overload enabled only when CommandBufferAllocator allocates UniqueHandles; it seeds the
  // result vector with the caller-supplied allocator instead of a default-constructed one.
  template <
    typename Dispatch,
    typename CommandBufferAllocator,
    typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                          CommandBufferAllocator &                                commandBufferAllocator,
                                          Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Returns commandBufferCount buffers to commandPool (C-style pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                                     uint32_t                                    commandBufferCount,
                                                     const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                     Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: count and data pointer are taken from the proxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool                                                    commandPool,
                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Alias of freeCommandBuffers. NOTE(review): the name is parenthesized, presumably to keep
  // a macro named 'free' (e.g. from C headers) from expanding here -- TODO confirm.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                          uint32_t                                    commandBufferCount,
                                          const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                          Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of the parenthesized free alias.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool                                                    commandPool,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Begins recording; returns the raw Result without throwing (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
                                                                      Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: takes a reference and routes the result through resultCheck
  // (which reports failure) before converting to the ResultValueType return.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Ends recording; raw Result form (enhanced mode disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
#else
  // Ends recording; enhanced form, result is checked via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Resets the command buffer; raw Result form (enhanced mode disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
                                                                      Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  // Resets the command buffer; enhanced form, result is checked via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Thin wrapper over vkCmdBindPipeline.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                      VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                      Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  }

  // Thin wrapper over vkCmdSetViewport (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
                                                     uint32_t                               viewportCount,
                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                     Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of setViewport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                                                                 firstViewport,
                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdSetScissor (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
                                                    uint32_t                             scissorCount,
                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                    Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of setScissor.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                                                               firstScissor,
                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdSetLineWidth.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }

  // Thin wrapper over vkCmdSetDepthBias.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }

  // Thin wrapper over vkCmdSetBlendConstants; blendConstants is an RGBA array of four floats.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }

  // Thin wrapper over vkCmdSetDepthBounds.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }

  // Thin wrapper over vkCmdSetStencilCompareMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }

  // Thin wrapper over vkCmdSetStencilWriteMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }

  // Thin wrapper over vkCmdSetStencilReference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }

  // Thin wrapper over vkCmdBindDescriptorSets (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint     pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout        layout,
                                                            uint32_t                                    firstSet,
                                                            uint32_t                                    descriptorSetCount,
                                                            const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                            uint32_t                                    dynamicOffsetCount,
                                                            const uint32_t *                            pDynamicOffsets,
                                                            Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSetCount,
                               reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
                               dynamicOffsetCount,
                               pDynamicOffsets );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: set and offset counts are taken from the proxies.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                                            uint32_t                                firstSet,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSets.size(),
                               reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
                               dynamicOffsets.size(),
                               dynamicOffsets.data() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdBindIndexBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                         VULKAN_HPP_NAMESPACE::IndexType  indexType,
                                                         Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
  }

  // Thin wrapper over vkCmdBindVertexBuffers (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                 firstBinding,
                                                           uint32_t                                 bindingCount,
                                                           const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                           const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                           Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers(
      m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant; buffers and offsets must have matching sizes (asserted when
  // exceptions are off, LogicError otherwise), hence not unconditionally noexcept.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                                                     firstBinding,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdDraw.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw(
    uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  }

  // Thin wrapper over vkCmdDrawIndexed.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t         indexCount,
                                                     uint32_t         instanceCount,
                                                     uint32_t         firstIndex,
                                                     int32_t          vertexOffset,
                                                     uint32_t         firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }

  // Thin wrapper over vkCmdDrawIndirect.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                      uint32_t                         drawCount,
                                                      uint32_t                         stride,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  // Thin wrapper over vkCmdDrawIndexedIndirect.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             uint32_t                         drawCount,
                                                             uint32_t                         stride,
                                                             Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  // Thin wrapper over vkCmdDispatch.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }

  // Thin wrapper over vkCmdDispatchIndirect.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                          Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  }

  // Thin wrapper over vkCmdCopyBuffer (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer             srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer             dstBuffer,
                                                    uint32_t                                 regionCount,
                                                    const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
                                                    Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regionCount,
                       reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of copyBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer                                                     srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer                                                     dstBuffer,
                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regions.size(),
                       reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdCopyImage (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of copyImage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image                                                     srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                                               srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image                                                     dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                                               dstImageLayout,
                                                   VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdBlitImage (pointer interface); filter selects the scaling filter.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
                                                   VULKAN_HPP_NAMESPACE::Filter            filter,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageBlit *>( pRegions ),
                      static_cast<VkFilter>( filter ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of blitImage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image                                                     srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                                               srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image                                                     dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                                               dstImageLayout,
                                                   VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
                                                   VULKAN_HPP_NAMESPACE::Filter                                                    filter,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageBlit *>( regions.data() ),
                      static_cast<VkFilter>( filter ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdCopyBufferToImage (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                  srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image                   dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             dstImageLayout,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of copyBufferToImage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer      srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image       dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdCopyImageToBuffer (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                   srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer                  dstBuffer,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of copyImageToBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image       srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer      dstBuffer,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkCmdUpdateBuffer (pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
                                                      const void *                     pData,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdUpdateBuffer(
      m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, 5253 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5254 { 5255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5256 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5257 VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" ); 5258 # endif 5259 5260 d.vkCmdUpdateBuffer( m_commandBuffer, 5261 static_cast<VkBuffer>( dstBuffer ), 5262 static_cast<VkDeviceSize>( dstOffset ), 5263 data.size() * sizeof( DataType ), 5264 reinterpret_cast<const void *>( data.data() ) ); 5265 } 5266 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5267 5268 template <typename Dispatch> fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize size,uint32_t data,Dispatch const & d) const5269 VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5270 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5271 VULKAN_HPP_NAMESPACE::DeviceSize size, 5272 uint32_t data, 5273 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5274 { 5275 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5276 d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data ); 5277 } 5278 5279 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const5280 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5281 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5282 const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, 5283 uint32_t rangeCount, 5284 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5285 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 5286 { 5287 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5288 d.vkCmdClearColorImage( m_commandBuffer, 5289 static_cast<VkImage>( image ), 5290 static_cast<VkImageLayout>( imageLayout ), 5291 reinterpret_cast<const VkClearColorValue *>( pColor ), 5292 rangeCount, 5293 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5294 } 5295 5296 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5297 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue & color,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5298 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5299 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5300 const VULKAN_HPP_NAMESPACE::ClearColorValue & color, 5301 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5302 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5303 { 5304 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5305 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5306 VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" ); 5307 # endif 5308 5309 d.vkCmdClearColorImage( m_commandBuffer, 5310 static_cast<VkImage>( image ), 5311 static_cast<VkImageLayout>( imageLayout ), 5312 reinterpret_cast<const VkClearColorValue *>( &color ), 5313 ranges.size(), 5314 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5315 } 5316 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5317 5318 template <typename Dispatch> clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * 
pRanges,Dispatch const & d) const5319 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5320 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5321 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, 5322 uint32_t rangeCount, 5323 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5324 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5325 { 5326 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5327 d.vkCmdClearDepthStencilImage( m_commandBuffer, 5328 static_cast<VkImage>( image ), 5329 static_cast<VkImageLayout>( imageLayout ), 5330 reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), 5331 rangeCount, 5332 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5333 } 5334 5335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5336 template <typename Dispatch> 5337 VULKAN_HPP_INLINE void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5338 CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5339 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5340 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, 5341 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5342 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5343 { 5344 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5345 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5346 VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" ); 5347 # endif 5348 5349 d.vkCmdClearDepthStencilImage( m_commandBuffer, 5350 static_cast<VkImage>( image ), 5351 static_cast<VkImageLayout>( imageLayout ), 5352 reinterpret_cast<const 
VkClearDepthStencilValue *>( &depthStencil ), 5353 ranges.size(), 5354 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5355 } 5356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5357 5358 template <typename Dispatch> clearAttachments(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,uint32_t rectCount,const VULKAN_HPP_NAMESPACE::ClearRect * pRects,Dispatch const & d) const5359 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, 5360 const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, 5361 uint32_t rectCount, 5362 const VULKAN_HPP_NAMESPACE::ClearRect * pRects, 5363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5364 { 5365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5366 d.vkCmdClearAttachments( m_commandBuffer, 5367 attachmentCount, 5368 reinterpret_cast<const VkClearAttachment *>( pAttachments ), 5369 rectCount, 5370 reinterpret_cast<const VkClearRect *>( pRects ) ); 5371 } 5372 5373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5374 template <typename Dispatch> clearAttachments(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,Dispatch const & d) const5375 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, 5376 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, 5377 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5378 { 5379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5380 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5381 VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" ); 5382 # endif 5383 5384 d.vkCmdClearAttachments( m_commandBuffer, 5385 attachments.size(), 5386 reinterpret_cast<const 
VkClearAttachment *>( attachments.data() ), 5387 rects.size(), 5388 reinterpret_cast<const VkClearRect *>( rects.data() ) ); 5389 } 5390 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5391 5392 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,Dispatch const & d) const5393 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5394 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5395 VULKAN_HPP_NAMESPACE::Image dstImage, 5396 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5397 uint32_t regionCount, 5398 const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, 5399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5400 { 5401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5402 d.vkCmdResolveImage( m_commandBuffer, 5403 static_cast<VkImage>( srcImage ), 5404 static_cast<VkImageLayout>( srcImageLayout ), 5405 static_cast<VkImage>( dstImage ), 5406 static_cast<VkImageLayout>( dstImageLayout ), 5407 regionCount, 5408 reinterpret_cast<const VkImageResolve *>( pRegions ) ); 5409 } 5410 5411 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5412 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,Dispatch const & d) const5413 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5414 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5415 VULKAN_HPP_NAMESPACE::Image dstImage, 5416 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5417 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & 
regions, 5418 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5419 { 5420 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5421 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5422 VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" ); 5423 # endif 5424 5425 d.vkCmdResolveImage( m_commandBuffer, 5426 static_cast<VkImage>( srcImage ), 5427 static_cast<VkImageLayout>( srcImageLayout ), 5428 static_cast<VkImage>( dstImage ), 5429 static_cast<VkImageLayout>( dstImageLayout ), 5430 regions.size(), 5431 reinterpret_cast<const VkImageResolve *>( regions.data() ) ); 5432 } 5433 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5434 5435 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const5436 VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, 5437 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 5438 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5439 { 5440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5441 d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 5442 } 5443 5444 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const5445 VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 5446 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 5447 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5448 { 5449 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5450 d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 5451 } 5452 5453 template <typename Dispatch> waitEvents(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,VULKAN_HPP_NAMESPACE::PipelineStageFlags 
srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const5454 VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, 5455 const VULKAN_HPP_NAMESPACE::Event * pEvents, 5456 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5457 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5458 uint32_t memoryBarrierCount, 5459 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 5460 uint32_t bufferMemoryBarrierCount, 5461 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 5462 uint32_t imageMemoryBarrierCount, 5463 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 5464 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5465 { 5466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5467 d.vkCmdWaitEvents( m_commandBuffer, 5468 eventCount, 5469 reinterpret_cast<const VkEvent *>( pEvents ), 5470 static_cast<VkPipelineStageFlags>( srcStageMask ), 5471 static_cast<VkPipelineStageFlags>( dstStageMask ), 5472 memoryBarrierCount, 5473 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 5474 bufferMemoryBarrierCount, 5475 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 5476 imageMemoryBarrierCount, 5477 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 5478 } 5479 5480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5481 template <typename Dispatch> 5482 VULKAN_HPP_INLINE void waitEvents(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const5483 CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 5484 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5485 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5486 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 5487 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 5488 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 5489 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5490 { 5491 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5492 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5493 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" ); 5494 # endif 5495 5496 d.vkCmdWaitEvents( m_commandBuffer, 5497 events.size(), 5498 reinterpret_cast<const VkEvent *>( events.data() ), 5499 static_cast<VkPipelineStageFlags>( srcStageMask ), 5500 static_cast<VkPipelineStageFlags>( dstStageMask ), 5501 memoryBarriers.size(), 5502 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 5503 bufferMemoryBarriers.size(), 5504 reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 5505 imageMemoryBarriers.size(), 5506 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 5507 } 5508 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5509 5510 template <typename Dispatch> pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags 
dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const5511 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5512 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5513 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 5514 uint32_t memoryBarrierCount, 5515 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 5516 uint32_t bufferMemoryBarrierCount, 5517 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 5518 uint32_t imageMemoryBarrierCount, 5519 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 5520 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5521 { 5522 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5523 d.vkCmdPipelineBarrier( m_commandBuffer, 5524 static_cast<VkPipelineStageFlags>( srcStageMask ), 5525 static_cast<VkPipelineStageFlags>( dstStageMask ), 5526 static_cast<VkDependencyFlags>( dependencyFlags ), 5527 memoryBarrierCount, 5528 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 5529 bufferMemoryBarrierCount, 5530 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 5531 imageMemoryBarrierCount, 5532 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 5533 } 5534 5535 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5536 template <typename Dispatch> 5537 VULKAN_HPP_INLINE void pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & 
memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const5538 CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5539 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5540 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 5541 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 5542 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 5543 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 5544 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5545 { 5546 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5547 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5548 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" ); 5549 # endif 5550 5551 d.vkCmdPipelineBarrier( m_commandBuffer, 5552 static_cast<VkPipelineStageFlags>( srcStageMask ), 5553 static_cast<VkPipelineStageFlags>( dstStageMask ), 5554 static_cast<VkDependencyFlags>( dependencyFlags ), 5555 memoryBarriers.size(), 5556 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 5557 bufferMemoryBarriers.size(), 5558 reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 5559 imageMemoryBarriers.size(), 5560 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 5561 } 5562 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5563 5564 template <typename Dispatch> beginQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,Dispatch const & d) const5565 VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool 
queryPool, 5566 uint32_t query, 5567 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 5568 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5569 { 5570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5571 d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); 5572 } 5573 5574 template <typename Dispatch> endQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5575 VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5576 { 5577 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5578 d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query ); 5579 } 5580 5581 template <typename Dispatch> resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const5582 VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5583 uint32_t firstQuery, 5584 uint32_t queryCount, 5585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5586 { 5587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5588 d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 5589 } 5590 5591 template <typename Dispatch> writeTimestamp(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5592 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 5593 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5594 uint32_t query, 5595 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5596 { 5597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5598 d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), 
static_cast<VkQueryPool>( queryPool ), query ); 5599 } 5600 5601 template <typename Dispatch> copyQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const5602 VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5603 uint32_t firstQuery, 5604 uint32_t queryCount, 5605 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5606 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5607 VULKAN_HPP_NAMESPACE::DeviceSize stride, 5608 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 5609 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5610 { 5611 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5612 d.vkCmdCopyQueryPoolResults( m_commandBuffer, 5613 static_cast<VkQueryPool>( queryPool ), 5614 firstQuery, 5615 queryCount, 5616 static_cast<VkBuffer>( dstBuffer ), 5617 static_cast<VkDeviceSize>( dstOffset ), 5618 static_cast<VkDeviceSize>( stride ), 5619 static_cast<VkQueryResultFlags>( flags ) ); 5620 } 5621 5622 template <typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues,Dispatch const & d) const5623 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5624 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5625 uint32_t offset, 5626 uint32_t size, 5627 const void * pValues, 5628 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5629 { 5630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5631 d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues ); 5632 } 5633 5634 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5635 template 
<typename ValuesType, typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,Dispatch const & d) const5636 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5637 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5638 uint32_t offset, 5639 VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, 5640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5641 { 5642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5644 VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" ); 5645 # endif 5646 5647 d.vkCmdPushConstants( m_commandBuffer, 5648 static_cast<VkPipelineLayout>( layout ), 5649 static_cast<VkShaderStageFlags>( stageFlags ), 5650 offset, 5651 values.size() * sizeof( ValuesType ), 5652 reinterpret_cast<const void *>( values.data() ) ); 5653 } 5654 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5655 5656 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5657 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 5658 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5659 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5660 { 5661 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5662 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5663 } 5664 5665 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5666 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & 
renderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5667 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 5668 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5669 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5670 { 5671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5672 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5673 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" ); 5674 # endif 5675 5676 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5677 } 5678 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5679 5680 template <typename Dispatch> nextSubpass(VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5681 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5682 { 5683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5684 d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) ); 5685 } 5686 5687 template <typename Dispatch> endRenderPass(Dispatch const & d) const5688 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5689 { 5690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5691 d.vkCmdEndRenderPass( m_commandBuffer ); 5692 } 5693 5694 template <typename Dispatch> executeCommands(uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const5695 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, 5696 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 5697 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5698 { 5699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 5700 d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 5701 } 5702 5703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5704 template <typename Dispatch> executeCommands(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const5705 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 5706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5707 { 5708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5710 VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" ); 5711 # endif 5712 5713 d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 5714 } 5715 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5716 5717 //=== VK_VERSION_1_1 === 5718 5719 template <typename Dispatch> enumerateInstanceVersion(uint32_t * pApiVersion,Dispatch const & d)5720 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT 5721 { 5722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5723 return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) ); 5724 } 5725 5726 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5727 template <typename Dispatch> enumerateInstanceVersion(Dispatch const & d)5728 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d ) 5729 { 5730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5731 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5732 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires 
<VK_VERSION_1_1>" ); 5733 # endif 5734 5735 uint32_t apiVersion; 5736 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceVersion( &apiVersion ) ); 5737 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); 5738 5739 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); 5740 } 5741 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5742 5743 template <typename Dispatch> bindBufferMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const5744 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, 5745 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 5746 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5747 { 5748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5749 return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 5750 } 5751 5752 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5753 template <typename Dispatch> 5754 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const5755 Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const 5756 { 5757 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5758 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5759 VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 5760 # endif 5761 5762 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5763 d.vkBindBufferMemory2( m_device, 
bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 5764 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); 5765 5766 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 5767 } 5768 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5769 5770 template <typename Dispatch> bindImageMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const5771 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, 5772 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 5773 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5774 { 5775 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5776 return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 5777 } 5778 5779 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5780 template <typename Dispatch> 5781 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const5782 Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 5783 { 5784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5785 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5786 VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 5787 # endif 5788 5789 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5790 d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 5791 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); 5792 5793 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 5794 } 5795 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5796 5797 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const5798 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, 5799 uint32_t localDeviceIndex, 5800 uint32_t remoteDeviceIndex, 5801 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 5802 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5803 { 5804 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5805 d.vkGetDeviceGroupPeerMemoryFeatures( 5806 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 5807 } 5808 5809 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5810 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const5811 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( 5812 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5813 { 5814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5815 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5816 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures && 5817 "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); 5818 # endif 5819 5820 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 5821 d.vkGetDeviceGroupPeerMemoryFeatures( 5822 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) 
); 5823 5824 return peerMemoryFeatures; 5825 } 5826 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5827 5828 template <typename Dispatch> setDeviceMask(uint32_t deviceMask,Dispatch const & d) const5829 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5830 { 5831 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5832 d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); 5833 } 5834 5835 template <typename Dispatch> dispatchBase(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const5836 VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, 5837 uint32_t baseGroupY, 5838 uint32_t baseGroupZ, 5839 uint32_t groupCountX, 5840 uint32_t groupCountY, 5841 uint32_t groupCountZ, 5842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5843 { 5844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5845 d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 5846 } 5847 5848 template <typename Dispatch> 5849 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroups(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const5850 Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, 5851 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 5852 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5853 { 5854 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5855 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( 5856 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 5857 } 5858 5859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5860 template <typename 
PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 5861 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5862 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(Dispatch const & d) const5863 Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const 5864 { 5865 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5866 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5867 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && 5868 "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 5869 # endif 5870 5871 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 5872 uint32_t physicalDeviceGroupCount; 5873 VULKAN_HPP_NAMESPACE::Result result; 5874 do 5875 { 5876 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5877 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5878 { 5879 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5880 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5881 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 5882 } 5883 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5884 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5885 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5886 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5887 { 5888 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5889 } 5890 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 5891 } 5892 5893 template <typename PhysicalDeviceGroupPropertiesAllocator, 5894 typename Dispatch, 5895 typename std::enable_if< 5896 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 5897 int>::type> 5898 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5899 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const5900 Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 5901 { 5902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5903 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5904 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && 5905 "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 5906 # endif 5907 5908 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 5909 physicalDeviceGroupPropertiesAllocator ); 5910 uint32_t physicalDeviceGroupCount; 5911 VULKAN_HPP_NAMESPACE::Result result; 5912 do 5913 { 5914 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5915 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5916 { 5917 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5918 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5919 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( 
physicalDeviceGroupProperties.data() ) ) ); 5920 } 5921 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5922 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5923 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5924 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5925 { 5926 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5927 } 5928 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 5929 } 5930 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5931 5932 template <typename Dispatch> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5933 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 5934 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5935 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5936 { 5937 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5938 d.vkGetImageMemoryRequirements2( 5939 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5940 } 5941 5942 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5943 template <typename Dispatch> 5944 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5945 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5946 { 5947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5948 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) 5949 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && 5950 "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5951 # endif 5952 5953 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 5954 d.vkGetImageMemoryRequirements2( 5955 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5956 5957 return memoryRequirements; 5958 } 5959 5960 template <typename X, typename Y, typename... Z, typename Dispatch> 5961 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5962 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5963 { 5964 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5965 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5966 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && 5967 "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5968 # endif 5969 5970 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5971 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 5972 d.vkGetImageMemoryRequirements2( 5973 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5974 5975 return structureChain; 5976 } 5977 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5978 5979 template <typename Dispatch> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5980 VULKAN_HPP_INLINE 
void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 5981 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5982 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5983 { 5984 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5985 d.vkGetBufferMemoryRequirements2( 5986 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5987 } 5988 5989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5990 template <typename Dispatch> 5991 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const5992 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5993 { 5994 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5995 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5996 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && 5997 "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5998 # endif 5999 6000 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 6001 d.vkGetBufferMemoryRequirements2( 6002 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 6003 6004 return memoryRequirements; 6005 } 6006 6007 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6008 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const6009 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6010 { 6011 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6012 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6013 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && 6014 "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6015 # endif 6016 6017 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6018 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 6019 d.vkGetBufferMemoryRequirements2( 6020 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 6021 6022 return structureChain; 6023 } 6024 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6025 6026 template <typename Dispatch> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const6027 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 6028 uint32_t * pSparseMemoryRequirementCount, 6029 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 6030 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6031 { 6032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6033 d.vkGetImageSparseMemoryRequirements2( m_device, 6034 reinterpret_cast<const 
VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 6035 pSparseMemoryRequirementCount, 6036 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 6037 } 6038 6039 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6040 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 6041 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const6042 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 6043 { 6044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6045 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6046 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6047 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6048 # endif 6049 6050 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 6051 uint32_t sparseMemoryRequirementCount; 6052 d.vkGetImageSparseMemoryRequirements2( 6053 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6054 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6055 d.vkGetImageSparseMemoryRequirements2( m_device, 6056 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6057 &sparseMemoryRequirementCount, 6058 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 6059 6060 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6061 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6062 { 6063 sparseMemoryRequirements.resize( 
sparseMemoryRequirementCount ); 6064 } 6065 return sparseMemoryRequirements; 6066 } 6067 6068 template <typename SparseImageMemoryRequirements2Allocator, 6069 typename Dispatch, 6070 typename std::enable_if< 6071 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 6072 int>::type> 6073 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const6074 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 6075 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 6076 Dispatch const & d ) const 6077 { 6078 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6079 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6080 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6081 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6082 # endif 6083 6084 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 6085 sparseImageMemoryRequirements2Allocator ); 6086 uint32_t sparseMemoryRequirementCount; 6087 d.vkGetImageSparseMemoryRequirements2( 6088 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6089 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6090 d.vkGetImageSparseMemoryRequirements2( m_device, 6091 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6092 &sparseMemoryRequirementCount, 6093 reinterpret_cast<VkSparseImageMemoryRequirements2 
*>( sparseMemoryRequirements.data() ) ); 6094 6095 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6096 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6097 { 6098 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6099 } 6100 return sparseMemoryRequirements; 6101 } 6102 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6103 6104 template <typename Dispatch> getFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const6105 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6106 { 6107 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6108 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 6109 } 6110 6111 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6112 template <typename Dispatch> 6113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const & d) const6114 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6115 { 6116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6118 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6119 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6120 # endif 6121 6122 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 6123 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6124 6125 return features; 6126 } 6127 6128 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6129 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2(Dispatch const & d) const6130 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6131 { 6132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6133 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6134 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6135 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6136 # endif 6137 6138 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6139 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 6140 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6141 6142 return structureChain; 6143 } 6144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6145 6146 template <typename Dispatch> getProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const6147 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 6148 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6149 { 6150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6151 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 6152 } 6153 6154 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6155 template <typename Dispatch> 6156 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const & d) const6157 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6158 { 6159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6161 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 
6162 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6163 # endif 6164 6165 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 6166 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6167 6168 return properties; 6169 } 6170 6171 template <typename X, typename Y, typename... Z, typename Dispatch> 6172 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2(Dispatch const & d) const6173 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6174 { 6175 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6176 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6177 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 6178 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6179 # endif 6180 6181 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6182 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 6183 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6184 6185 return structureChain; 6186 } 6187 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6188 6189 template <typename Dispatch> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const6190 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, 6191 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 6192 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6193 { 6194 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6195 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), 
reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 6196 } 6197 6198 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6199 template <typename Dispatch> 6200 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6201 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6202 { 6203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6205 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6206 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6207 # endif 6208 6209 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 6210 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6211 6212 return formatProperties; 6213 } 6214 6215 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6216 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6217 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6218 { 6219 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6220 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6221 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6222 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6223 # endif 6224 6225 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6226 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 6227 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6228 6229 return structureChain; 6230 } 6231 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6232 6233 template <typename Dispatch> 6234 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const6235 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 6236 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 6237 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6238 { 6239 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6240 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6241 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 6242 reinterpret_cast<VkImageFormatProperties2 *>( 
pImageFormatProperties ) ) ); 6243 } 6244 6245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6246 template <typename Dispatch> 6247 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6248 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6249 { 6250 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6251 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6252 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6253 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6254 # endif 6255 6256 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 6257 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6258 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6259 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6260 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6261 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6262 6263 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 6264 } 6265 6266 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6267 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6268 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6269 { 6270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6271 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6272 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6273 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6274 # endif 6275 6276 StructureChain<X, Y, Z...> structureChain; 6277 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 6278 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6279 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6280 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6281 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6282 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6283 6284 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 6285 } 6286 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6287 6288 template <typename Dispatch> getQueueFamilyProperties2(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const6289 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, 6290 VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 6291 Dispatch 
                                                                      const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call idiom — query the count, resize, then fetch; shrinks the vector
  // if the implementation reported fewer entries on the second call.
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
                       "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // Same as above, but the vector is constructed from a caller-supplied allocator instance.
  template <
    typename QueueFamilyProperties2Allocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
                       "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // StructureChain overload: one chain per queue family. The pNext pointers of the caller's chains
  // are threaded into a temporary QueueFamilyProperties2 array for the query, then the results are
  // copied back into the chains' head elements.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
                       "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<StructureChain, StructureChainAllocator>      structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t                                                  queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      // Borrow each chain's pNext so extension structs get filled by the driver call below.
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }

  // Same StructureChain overload, with a caller-supplied allocator instance.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
                       "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<StructureChain, StructureChainAllocator>      structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t                                                  queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes through pMemoryProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                                                               Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns PhysicalDeviceMemoryProperties2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
    PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 &&
                       "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return memoryProperties;
  }

  // StructureChain overload: fills the PhysicalDeviceMemoryProperties2 element of the chain,
  // so pNext-extension structures are queried in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 &&
                       "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>        structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style two-call enumeration entry point (count query when pProperties is null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
                                                                          uint32_t *                                                          pPropertyCount,
                                                                          VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *                pProperties,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
                                                       pPropertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call idiom — query the count, resize, fetch, and shrink if the
  // implementation reported fewer entries on the second call.
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t                                                                                               propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }

  // Same as above, but the vector is constructed from a caller-supplied allocator instance.
  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                     SparseImageFormatProperties2Allocator &                            sparseImageFormatProperties2Allocator,
                                                     Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t                                                                                               propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template
 <typename Dispatch>
  // Releases unused memory held by the command pool back to the system (VK_VERSION_1_1).
  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                                                  VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                                                  Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }

  // C-style overload: writes the queue handle through pQueue.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
                                            VULKAN_HPP_NAMESPACE::Queue *                  pQueue,
                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the Queue handle by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function <vkGetDeviceQueue2> requires <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );

    return queue;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes the new handle through pYcbcrConversion and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                          VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
                                          Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
                                                                  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the handle; the Result is routed through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                          Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
                       "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the created handle so it is destroyed automatically via ObjectDestroy.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                                Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
                       "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#   endif

    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: pAllocator may be null for default allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes an Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion              ycbcrConversion,
                                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                Dispatch const & d ) const
 VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion &&
                       "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy overload for SamplerYcbcrConversion (same call as destroySamplerYcbcrConversion).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload, enhanced form (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion              ycbcrConversion,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion &&
                       "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes the new handle through pDescriptorUpdateTemplate and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
                                            VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
                                            Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
                                                                    reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                    reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the handle; the Result is routed through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                            Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate &&
                       "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VULKAN_HPP_NAMESPACE::Result                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the created handle so it is destroyed automatically via ObjectDestroy.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                  Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate &&
                       "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
#   endif

    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VULKAN_HPP_NAMESPACE::Result                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result,
                                                                UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
                                                                  descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: pAllocator may be null for default allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes an Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate            descriptorUpdateTemplate,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate &&
                       "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
#  endif

    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks
*>( pAllocator ) ); 6808 } 6809 6810 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6811 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6812 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6813 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6815 { 6816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6818 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && 6819 "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6820 # endif 6821 6822 d.vkDestroyDescriptorUpdateTemplate( 6823 m_device, 6824 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6825 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6826 } 6827 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6828 6829 template <typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const6830 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6831 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6832 const void * pData, 6833 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6834 { 6835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6836 d.vkUpdateDescriptorSetWithTemplate( 6837 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 6838 } 6839 6840 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 6841 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const6842 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6843 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6844 DataType const & data, 6845 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6846 { 6847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6848 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6849 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && 6850 "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6851 # endif 6852 6853 d.vkUpdateDescriptorSetWithTemplate( m_device, 6854 static_cast<VkDescriptorSet>( descriptorSet ), 6855 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6856 reinterpret_cast<const void *>( &data ) ); 6857 } 6858 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6859 6860 template <typename Dispatch> getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const6861 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 6862 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 6863 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6864 { 6865 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6866 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6867 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 6868 
reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 6869 } 6870 6871 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6872 template <typename Dispatch> 6873 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const6874 PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 6875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6876 { 6877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6879 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && 6880 "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 6881 # endif 6882 6883 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 6884 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6885 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 6886 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 6887 6888 return externalBufferProperties; 6889 } 6890 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6891 6892 template <typename Dispatch> getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const6893 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 6894 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 6895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6896 { 6897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6898 
d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6899 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 6900 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 6901 } 6902 6903 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6904 template <typename Dispatch> 6905 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const6906 PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 6907 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6908 { 6909 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6910 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6911 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && 6912 "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 6913 # endif 6914 6915 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 6916 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6917 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 6918 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 6919 6920 return externalFenceProperties; 6921 } 6922 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6923 6924 template <typename Dispatch> 6925 VULKAN_HPP_INLINE void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const6926 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 6927 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * 
pExternalSemaphoreProperties, 6928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6929 { 6930 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6931 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6932 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 6933 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 6934 } 6935 6936 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6937 template <typename Dispatch> 6938 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const6939 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 6940 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6941 { 6942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6943 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6944 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && 6945 "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 6946 # endif 6947 6948 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 6949 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6950 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 6951 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 6952 6953 return externalSemaphoreProperties; 6954 } 6955 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6956 6957 template <typename Dispatch> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const6958 
VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 6959 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 6960 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6961 { 6962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6963 d.vkGetDescriptorSetLayoutSupport( 6964 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 6965 } 6966 6967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6968 template <typename Dispatch> 6969 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6970 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6971 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6972 { 6973 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6974 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6975 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 6976 # endif 6977 6978 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 6979 d.vkGetDescriptorSetLayoutSupport( 6980 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6981 6982 return support; 6983 } 6984 6985 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6986 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6987 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6988 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6989 { 6990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6991 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6992 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 6993 # endif 6994 6995 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6996 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 6997 d.vkGetDescriptorSetLayoutSupport( 6998 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6999 7000 return structureChain; 7001 } 7002 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7003 7004 //=== VK_VERSION_1_2 === 7005 7006 template <typename Dispatch> drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7007 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7008 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7009 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 7010 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7011 uint32_t maxDrawCount, 7012 uint32_t stride, 7013 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7014 { 7015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7016 
d.vkCmdDrawIndirectCount( m_commandBuffer, 7017 static_cast<VkBuffer>( buffer ), 7018 static_cast<VkDeviceSize>( offset ), 7019 static_cast<VkBuffer>( countBuffer ), 7020 static_cast<VkDeviceSize>( countBufferOffset ), 7021 maxDrawCount, 7022 stride ); 7023 } 7024 7025 template <typename Dispatch> drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7026 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7027 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7028 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 7029 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7030 uint32_t maxDrawCount, 7031 uint32_t stride, 7032 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7033 { 7034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7035 d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, 7036 static_cast<VkBuffer>( buffer ), 7037 static_cast<VkDeviceSize>( offset ), 7038 static_cast<VkBuffer>( countBuffer ), 7039 static_cast<VkDeviceSize>( countBufferOffset ), 7040 maxDrawCount, 7041 stride ); 7042 } 7043 7044 template <typename Dispatch> createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const7045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 7046 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7047 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 7048 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7049 { 7050 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7051 return static_cast<Result>( d.vkCreateRenderPass2( m_device, 7052 
reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 7053 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7054 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 7055 } 7056 7057 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7058 template <typename Dispatch> 7059 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7060 Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7061 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7062 Dispatch const & d ) const 7063 { 7064 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7065 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7066 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7067 # endif 7068 7069 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7070 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7071 d.vkCreateRenderPass2( m_device, 7072 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7073 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7074 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7075 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); 7076 7077 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 7078 } 7079 7080 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7081 template <typename Dispatch> 7082 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique(const 
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7083 Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7084 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7085 Dispatch const & d ) const 7086 { 7087 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7088 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7089 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7090 # endif 7091 7092 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7093 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7094 d.vkCreateRenderPass2( m_device, 7095 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7096 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7097 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7098 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); 7099 7100 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7101 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7102 } 7103 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 7104 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7105 7106 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const7107 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 7108 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7109 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7110 { 
7111 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7112 d.vkCmdBeginRenderPass2( 7113 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 7114 } 7115 7116 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7117 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const7118 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 7119 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7120 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7121 { 7122 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7123 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7124 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7125 # endif 7126 7127 d.vkCmdBeginRenderPass2( 7128 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 7129 } 7130 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7131 7132 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7133 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7134 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7136 { 7137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7138 d.vkCmdNextSubpass2( 7139 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo 
*>( pSubpassEndInfo ) ); 7140 } 7141 7142 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7143 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7144 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7145 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7146 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7147 { 7148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7149 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7150 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7151 # endif 7152 7153 d.vkCmdNextSubpass2( 7154 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7155 } 7156 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7157 7158 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7159 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7160 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7161 { 7162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7163 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 7164 } 7165 7166 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7167 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7168 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7170 { 7171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7172 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7173 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7174 # endif 7175 7176 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7177 } 7178 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7179 7180 template <typename Dispatch> 7181 VULKAN_HPP_INLINE void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const7182 Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7183 { 7184 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7185 d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 7186 } 7187 7188 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const7189 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7190 uint64_t * pValue, 7191 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7192 { 7193 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7194 return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); 7195 } 7196 7197 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7198 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const7199 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7200 Dispatch const & d ) const 7201 { 7202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7203 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7204 VULKAN_HPP_ASSERT( 
d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7205 # endif 7206 7207 uint64_t value; 7208 VULKAN_HPP_NAMESPACE::Result result = 7209 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 7210 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); 7211 7212 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 7213 } 7214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7215 7216 template <typename Dispatch> waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const7217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 7218 uint64_t timeout, 7219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7220 { 7221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7222 return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 7223 } 7224 7225 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7226 template <typename Dispatch> 7227 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const7228 Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 7229 { 7230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7231 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7232 VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7233 # endif 7234 7235 VULKAN_HPP_NAMESPACE::Result result = 7236 static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 7237 VULKAN_HPP_NAMESPACE::detail::resultCheck( 7238 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 7239 7240 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 7241 } 7242 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7243 7244 template <typename Dispatch> signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const7245 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 7246 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7247 { 7248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7249 return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 7250 } 7251 7252 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7253 template <typename Dispatch> 7254 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const7255 Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 7256 { 7257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7259 VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7260 # endif 7261 7262 VULKAN_HPP_NAMESPACE::Result result = 7263 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 7264 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); 7265 7266 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7267 } 7268 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7269 7270 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7271 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7273 { 7274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7275 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 7276 } 7277 7278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7279 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7280 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7282 { 7283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7284 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7285 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress && 7286 "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7287 # endif 7288 7289 VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7290 7291 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 7292 } 7293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7294 7295 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7296 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7297 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
7298 { 7299 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7300 return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 7301 } 7302 7303 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7304 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7305 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7307 { 7308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7309 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7310 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && 7311 "Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7312 # endif 7313 7314 uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7315 7316 return result; 7317 } 7318 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7319 7320 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) const7321 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 7322 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7323 { 7324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7325 return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 7326 } 7327 7328 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7329 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const7330 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( 
const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 7331 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7332 { 7333 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7334 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7335 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && 7336 "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7337 # endif 7338 7339 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 7340 7341 return result; 7342 } 7343 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7344 7345 //=== VK_VERSION_1_3 === 7346 7347 template <typename Dispatch> getToolProperties(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const7348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, 7349 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 7350 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7351 { 7352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7353 return static_cast<Result>( 7354 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 7355 } 7356 7357 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7358 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 7359 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7360 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(Dispatch const & d) const7361 PhysicalDevice::getToolProperties( Dispatch const & d ) const 7362 { 7363 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7364 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7365 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceToolProperties && 7366 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7367 # endif 7368 7369 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 7370 uint32_t toolCount; 7371 VULKAN_HPP_NAMESPACE::Result result; 7372 do 7373 { 7374 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7375 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7376 { 7377 toolProperties.resize( toolCount ); 7378 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7379 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7380 } 7381 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7382 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 7383 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7384 if ( toolCount < toolProperties.size() ) 7385 { 7386 toolProperties.resize( toolCount ); 7387 } 7388 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7389 } 7390 7391 template < 7392 typename PhysicalDeviceToolPropertiesAllocator, 7393 typename Dispatch, 7394 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 7395 int>::type> 7396 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7397 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const7398 PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & 
physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 7399 { 7400 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7401 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7402 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && 7403 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7404 # endif 7405 7406 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 7407 physicalDeviceToolPropertiesAllocator ); 7408 uint32_t toolCount; 7409 VULKAN_HPP_NAMESPACE::Result result; 7410 do 7411 { 7412 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7413 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7414 { 7415 toolProperties.resize( toolCount ); 7416 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7417 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7418 } 7419 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7420 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 7421 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7422 if ( toolCount < toolProperties.size() ) 7423 { 7424 toolProperties.resize( toolCount ); 7425 } 7426 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7427 } 7428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7429 7430 template <typename Dispatch> createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const7431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 7432 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7433 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 7434 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7435 { 7436 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7437 return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, 7438 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 7439 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7440 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 7441 } 7442 7443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7444 template <typename Dispatch> 7445 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7446 Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7447 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7448 Dispatch const & d ) const 7449 { 7450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7451 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7452 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7453 # endif 7454 7455 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7456 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7457 d.vkCreatePrivateDataSlot( m_device, 7458 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7459 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7460 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7461 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); 7462 7463 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); 7464 } 7465 7466 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7467 template <typename Dispatch> 7468 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7469 Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7470 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7471 Dispatch const & d ) const 7472 { 7473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7474 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7475 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7476 # endif 7477 7478 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7479 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7480 d.vkCreatePrivateDataSlot( m_device, 7481 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7482 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7483 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7484 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); 7485 7486 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7487 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7488 } 7489 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 7490 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7491 7492 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7493 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7494 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7495 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7496 { 7497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7498 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7499 } 7500 7501 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7502 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7503 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7504 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7505 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7506 { 7507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7508 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7509 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7510 # endif 7511 7512 d.vkDestroyPrivateDataSlot( 7513 m_device, 7514 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7515 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7516 } 7517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7518 7519 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
* pAllocator,Dispatch const & d) const7520 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7521 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7522 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7523 { 7524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7525 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7526 } 7527 7528 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7529 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7530 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7531 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7532 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7533 { 7534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7536 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7537 # endif 7538 7539 d.vkDestroyPrivateDataSlot( 7540 m_device, 7541 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7542 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7543 } 7544 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7545 7546 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 7547 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7548 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7549 uint64_t objectHandle, 7550 
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7551 uint64_t data, 7552 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7553 { 7554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7555 return static_cast<Result>( 7556 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7557 } 7558 #else 7559 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7560 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7561 uint64_t objectHandle, 7562 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7563 uint64_t data, 7564 Dispatch const & d ) const 7565 { 7566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7567 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7568 VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7569 # endif 7570 7571 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7572 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7573 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); 7574 7575 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7576 } 7577 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7578 7579 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const7580 VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7581 uint64_t objectHandle, 
7582 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7583 uint64_t * pData, 7584 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7585 { 7586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7587 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 7588 } 7589 7590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7591 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const7592 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7593 uint64_t objectHandle, 7594 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7595 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7596 { 7597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7599 VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7600 # endif 7601 7602 uint64_t data; 7603 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 7604 7605 return data; 7606 } 7607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7608 7609 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7610 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7611 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7612 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7613 { 7614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7615 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) 
); 7616 } 7617 7618 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7619 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7620 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7621 const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7622 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7623 { 7624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7626 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7627 # endif 7628 7629 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7630 } 7631 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7632 7633 template <typename Dispatch> resetEvent2(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const7634 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, 7635 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 7636 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7637 { 7638 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7639 d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 7640 } 7641 7642 template <typename Dispatch> waitEvents2(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const7643 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, 7644 const VULKAN_HPP_NAMESPACE::Event * pEvents, 7645 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 7646 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7647 { 7648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7649 d.vkCmdWaitEvents2( 7650 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 7651 } 7652 7653 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7654 template <typename Dispatch> waitEvents2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const7655 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 7656 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 7657 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7658 { 7659 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7660 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7661 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7662 # endif 7663 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7664 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 7665 # else 7666 if ( events.size() != dependencyInfos.size() ) 7667 { 7668 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); 7669 } 7670 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 7671 7672 d.vkCmdWaitEvents2( m_commandBuffer, 7673 events.size(), 7674 reinterpret_cast<const VkEvent *>( events.data() ), 7675 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 7676 } 7677 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7678 7679 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7680 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7681 Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT 7682 { 7683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7684 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 7685 } 7686 7687 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7688 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7689 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7690 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7691 { 7692 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7694 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7695 # endif 7696 7697 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7698 } 7699 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7700 7701 template <typename Dispatch> writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const7702 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 7703 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 7704 uint32_t query, 7705 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7706 { 7707 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7708 d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 7709 } 7710 7711 template <typename Dispatch> submit2(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7712 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, 7713 const 
VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 7714 VULKAN_HPP_NAMESPACE::Fence fence, 7715 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7716 { 7717 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7718 return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 7719 } 7720 7721 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7722 template <typename Dispatch> submit2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7723 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( 7724 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 7725 { 7726 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7727 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7728 VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7729 # endif 7730 7731 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7732 d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 7733 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); 7734 7735 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7736 } 7737 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7738 7739 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const7740 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 7741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7742 { 7743 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7744 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 7745 } 7746 7747 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7748 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const7749 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 7750 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7751 { 7752 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7753 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7754 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7755 # endif 7756 7757 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) ); /* NOTE(review): restored "&copyBufferInfo" — "&copy" had been mojibake-decoded to the HTML entity "©" */ 7758 } 7759 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7760 7761 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const7762 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7763 { 7764 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7765 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 7766 } 7767 7768 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7769 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const7770 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7771 { 7772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7773 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7774 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function <vkCmdCopyImage2>
requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7775 # endif 7776 7777 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 7778 } 7779 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7780 7781 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const7782 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 7783 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7784 { 7785 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7786 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 7787 } 7788 7789 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7790 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const7791 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 7792 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7793 { 7794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7796 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7797 # endif 7798 7799 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 7800 } 7801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7802 7803 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const7804 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 7805 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 7806 { 7807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7808 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 7809 } 7810 7811 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7812 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const7813 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 7814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7815 { 7816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7818 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7819 # endif 7820 7821 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 7822 } 7823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7824 7825 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const7826 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7827 { 7828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7829 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 7830 } 7831 7832 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7833 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const7834 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7835 { 7836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 7837 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7838 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7839 # endif 7840 7841 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 7842 } 7843 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7844 7845 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const7846 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 7847 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7848 { 7849 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7850 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 7851 } 7852 7853 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7854 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const7855 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 7856 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7857 { 7858 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7859 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7860 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7861 # endif 7862 7863 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 7864 } 7865 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7866 7867 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const7868 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 7869 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7870 { 7871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7872 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 7873 } 7874 7875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7876 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const7877 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 7878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7879 { 7880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7882 VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 7883 # endif 7884 7885 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 7886 } 7887 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7888 7889 template <typename Dispatch> endRendering(Dispatch const & d) const7890 VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7891 { 7892 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7893 d.vkCmdEndRendering( m_commandBuffer ); 7894 } 7895 7896 template <typename Dispatch> setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const7897 VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7898 { 7899 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7900 d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); 7901 } 7902 7903 template <typename Dispatch> setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const7904 VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( 
VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7905 { 7906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7907 d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); 7908 } 7909 7910 template <typename Dispatch> setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const7911 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 7912 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7913 { 7914 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7915 d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 7916 } 7917 7918 template <typename Dispatch> setViewportWithCount(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const7919 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, 7920 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 7921 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7922 { 7923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7924 d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 7925 } 7926 7927 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7928 template <typename Dispatch> setViewportWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const7929 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 7930 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7931 { 7932 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7933 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7934 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && 7935 "Function <vkCmdSetViewportWithCount> requires 
<VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7936 # endif 7937 7938 d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 7939 } 7940 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7941 7942 template <typename Dispatch> 7943 VULKAN_HPP_INLINE void setScissorWithCount(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const7944 CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7945 { 7946 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7947 d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 7948 } 7949 7950 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7951 template <typename Dispatch> setScissorWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const7952 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 7953 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7954 { 7955 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7956 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7957 VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && 7958 "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7959 # endif 7960 7961 d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 7962 } 7963 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7964 7965 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const 
VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const7966 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7967 uint32_t bindingCount, 7968 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 7969 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 7970 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 7971 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 7972 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7973 { 7974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7975 d.vkCmdBindVertexBuffers2( m_commandBuffer, 7976 firstBinding, 7977 bindingCount, 7978 reinterpret_cast<const VkBuffer *>( pBuffers ), 7979 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 7980 reinterpret_cast<const VkDeviceSize *>( pSizes ), 7981 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 7982 } 7983 7984 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7985 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const7986 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7987 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 7988 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 7989 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 7990 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 7991 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7992 { 7993 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7994 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7995 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && 7996 "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7997 # endif 7998 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7999 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 8000 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 8001 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 8002 # else 8003 if ( buffers.size() != offsets.size() ) 8004 { 8005 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); 8006 } 8007 if ( !sizes.empty() && buffers.size() != sizes.size() ) 8008 { 8009 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); 8010 } 8011 if ( !strides.empty() && buffers.size() != strides.size() ) 8012 { 8013 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); 8014 } 8015 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 8016 8017 d.vkCmdBindVertexBuffers2( m_commandBuffer, 8018 firstBinding, 8019 buffers.size(), 8020 reinterpret_cast<const VkBuffer *>( buffers.data() ), 8021 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 8022 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 8023 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 8024 } 8025 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8026 8027 template <typename Dispatch> setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const8028 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8029 { 8030 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8031 d.vkCmdSetDepthTestEnable( m_commandBuffer, 
static_cast<VkBool32>( depthTestEnable ) ); 8032 } 8033 8034 template <typename Dispatch> setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const8035 VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8036 { 8037 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8038 d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); 8039 } 8040 8041 template <typename Dispatch> setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const8042 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8043 { 8044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8045 d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); 8046 } 8047 8048 template <typename Dispatch> setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const8049 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 8050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8051 { 8052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8053 d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); 8054 } 8055 8056 template <typename Dispatch> setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const8057 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8058 { 8059 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8060 d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); 8061 } 8062 8063 template <typename Dispatch> 
setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const8064 VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 8065 VULKAN_HPP_NAMESPACE::StencilOp failOp, 8066 VULKAN_HPP_NAMESPACE::StencilOp passOp, 8067 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 8068 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 8069 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8070 { 8071 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8072 d.vkCmdSetStencilOp( m_commandBuffer, 8073 static_cast<VkStencilFaceFlags>( faceMask ), 8074 static_cast<VkStencilOp>( failOp ), 8075 static_cast<VkStencilOp>( passOp ), 8076 static_cast<VkStencilOp>( depthFailOp ), 8077 static_cast<VkCompareOp>( compareOp ) ); 8078 } 8079 8080 template <typename Dispatch> setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const8081 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 8082 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8083 { 8084 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8085 d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); 8086 } 8087 8088 template <typename Dispatch> setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const8089 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8090 { 8091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8092 d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 8093 } 8094 8095 template <typename Dispatch> 
setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const8096 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 8097 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8098 { 8099 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8100 d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 8101 } 8102 8103 template <typename Dispatch> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8104 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 8105 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 8106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8107 { 8108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8109 d.vkGetDeviceBufferMemoryRequirements( 8110 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8111 } 8112 8113 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8114 template <typename Dispatch> 8115 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8116 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8117 { 8118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8120 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8121 "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8122 # endif 8123 
8124 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8125 d.vkGetDeviceBufferMemoryRequirements( 8126 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8127 8128 return memoryRequirements; 8129 } 8130 8131 template <typename X, typename Y, typename... Z, typename Dispatch> 8132 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8133 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8134 { 8135 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8136 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8137 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8138 "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8139 # endif 8140 8141 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8142 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8143 d.vkGetDeviceBufferMemoryRequirements( 8144 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8145 8146 return structureChain; 8147 } 8148 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8149 8150 template <typename Dispatch> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8151 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8152 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 
8153 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8154 { 8155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8156 d.vkGetDeviceImageMemoryRequirements( 8157 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8158 } 8159 8160 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8161 template <typename Dispatch> 8162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8163 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8164 { 8165 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8166 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8167 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8168 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8169 # endif 8170 8171 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8172 d.vkGetDeviceImageMemoryRequirements( 8173 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8174 8175 return memoryRequirements; 8176 } 8177 8178 template <typename X, typename Y, typename... 
Z, typename Dispatch> 8179 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8180 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8181 { 8182 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8183 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8184 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8185 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8186 # endif 8187 8188 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8189 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8190 d.vkGetDeviceImageMemoryRequirements( 8191 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8192 8193 return structureChain; 8194 } 8195 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8196 8197 template <typename Dispatch> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const8198 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8199 uint32_t * pSparseMemoryRequirementCount, 8200 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 8201 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8202 { 8203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8204 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8205 reinterpret_cast<const 
VkDeviceImageMemoryRequirements *>( pInfo ), 8206 pSparseMemoryRequirementCount, 8207 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 8208 } 8209 8210 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8211 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 8212 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8213 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 8214 { 8215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8216 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8217 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8218 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8219 # endif 8220 8221 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 8222 uint32_t sparseMemoryRequirementCount; 8223 d.vkGetDeviceImageSparseMemoryRequirements( 8224 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8225 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8226 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8227 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8228 &sparseMemoryRequirementCount, 8229 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8230 8231 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8232 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8233 { 8234 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 
8235 } 8236 return sparseMemoryRequirements; 8237 } 8238 8239 template <typename SparseImageMemoryRequirements2Allocator, 8240 typename Dispatch, 8241 typename std::enable_if< 8242 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 8243 int>::type> 8244 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const8245 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 8246 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 8247 Dispatch const & d ) const 8248 { 8249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8250 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8251 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8252 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8253 # endif 8254 8255 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 8256 sparseImageMemoryRequirements2Allocator ); 8257 uint32_t sparseMemoryRequirementCount; 8258 d.vkGetDeviceImageSparseMemoryRequirements( 8259 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8260 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8261 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8262 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8263 &sparseMemoryRequirementCount, 8264 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8265 
8266 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8267 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8268 { 8269 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8270 } 8271 return sparseMemoryRequirements; 8272 } 8273 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8274 8275 //=== VK_VERSION_1_4 === 8276 8277 template <typename Dispatch> setLineStipple(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const8278 VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8279 { 8280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8281 d.vkCmdSetLineStipple( m_commandBuffer, lineStippleFactor, lineStipplePattern ); 8282 } 8283 8284 template <typename Dispatch> mapMemory2(const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo,void ** ppData,Dispatch const & d) const8285 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, 8286 void ** ppData, 8287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8288 { 8289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8290 return static_cast<Result>( d.vkMapMemory2( m_device, reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); 8291 } 8292 8293 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8294 template <typename Dispatch> mapMemory2(const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo,Dispatch const & d) const8295 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, 8296 Dispatch const & d ) const 8297 { 8298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8300 VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function <vkMapMemory2> requires 
<VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 8301 # endif 8302 8303 void * pData; 8304 VULKAN_HPP_NAMESPACE::Result result = 8305 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); 8306 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); 8307 8308 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 8309 } 8310 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8311 8312 template <typename Dispatch> unmapMemory2(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo,Dispatch const & d) const8313 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, 8314 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8315 { 8316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8317 return static_cast<Result>( d.vkUnmapMemory2( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); 8318 } 8319 8320 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8321 template <typename Dispatch> unmapMemory2(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo,Dispatch const & d) const8322 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, 8323 Dispatch const & d ) const 8324 { 8325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8326 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8327 VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function <vkUnmapMemory2> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 8328 # endif 8329 8330 VULKAN_HPP_NAMESPACE::Result result = 8331 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); 8332 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::unmapMemory2" ); 8333 8334 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8335 } 8336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8337 8338 template <typename Dispatch> bindIndexBuffer2(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const8339 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, 8340 VULKAN_HPP_NAMESPACE::DeviceSize offset, 8341 VULKAN_HPP_NAMESPACE::DeviceSize size, 8342 VULKAN_HPP_NAMESPACE::IndexType indexType, 8343 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8344 { 8345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8346 d.vkCmdBindIndexBuffer2( m_commandBuffer, 8347 static_cast<VkBuffer>( buffer ), 8348 static_cast<VkDeviceSize>( offset ), 8349 static_cast<VkDeviceSize>( size ), 8350 static_cast<VkIndexType>( indexType ) ); 8351 } 8352 8353 template <typename Dispatch> getRenderingAreaGranularity(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const8354 VULKAN_HPP_INLINE void Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, 8355 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 8356 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8357 { 8358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8359 d.vkGetRenderingAreaGranularity( 8360 m_device, reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 8361 } 8362 8363 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8364 template <typename Dispatch> 8365 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderingAreaGranularity(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo,Dispatch const & d) const8366 
Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8367 { 8368 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8369 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8370 VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function <vkGetRenderingAreaGranularity> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8371 # endif 8372 8373 VULKAN_HPP_NAMESPACE::Extent2D granularity; 8374 d.vkGetRenderingAreaGranularity( 8375 m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 8376 8377 return granularity; 8378 } 8379 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8380 8381 template <typename Dispatch> getImageSubresourceLayout(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const8382 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, 8383 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 8384 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8385 { 8386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8387 d.vkGetDeviceImageSubresourceLayout( 8388 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 8389 } 8390 8391 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8392 template <typename Dispatch> 8393 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 getImageSubresourceLayout(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info,Dispatch const & d) const8394 Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8395 { 8396 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8397 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8398 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && 8399 "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8400 # endif 8401 8402 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 8403 d.vkGetDeviceImageSubresourceLayout( 8404 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 8405 8406 return layout; 8407 } 8408 8409 template <typename X, typename Y, typename... Z, typename Dispatch> 8410 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageSubresourceLayout(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info,Dispatch const & d) const8411 Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8412 { 8413 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8414 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8415 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && 8416 "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8417 # endif 8418 8419 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8420 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 8421 d.vkGetDeviceImageSubresourceLayout( 8422 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 8423 8424 return structureChain; 8425 } 8426 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8427 8428 template <typename Dispatch> getImageSubresourceLayout2(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const8429 VULKAN_HPP_INLINE void 
Device::getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, 8430 const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, 8431 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 8432 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8433 { 8434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8435 d.vkGetImageSubresourceLayout2( m_device, 8436 static_cast<VkImage>( image ), 8437 reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), 8438 reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 8439 } 8440 8441 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8442 template <typename Dispatch> getImageSubresourceLayout2(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const8443 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2( 8444 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8445 { 8446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8447 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8448 VULKAN_HPP_ASSERT( 8449 d.vkGetImageSubresourceLayout2 && 8450 "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8451 # endif 8452 8453 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 8454 d.vkGetImageSubresourceLayout2( m_device, 8455 static_cast<VkImage>( image ), 8456 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 8457 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 8458 8459 return layout; 8460 } 8461 8462 template <typename X, typename Y, typename... 
Z, typename Dispatch> getImageSubresourceLayout2(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const8463 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2( 8464 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8465 { 8466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8467 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8468 VULKAN_HPP_ASSERT( 8469 d.vkGetImageSubresourceLayout2 && 8470 "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8471 # endif 8472 8473 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8474 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 8475 d.vkGetImageSubresourceLayout2( m_device, 8476 static_cast<VkImage>( image ), 8477 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 8478 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 8479 8480 return structureChain; 8481 } 8482 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8483 8484 template <typename Dispatch> pushDescriptorSet(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,Dispatch const & d) const8485 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 8486 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 8487 uint32_t set, 8488 uint32_t descriptorWriteCount, 8489 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 8490 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8491 { 8492 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8493 d.vkCmdPushDescriptorSet( m_commandBuffer, 8494 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 8495 static_cast<VkPipelineLayout>( layout ), 8496 set, 8497 descriptorWriteCount, 8498 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); 8499 } 8500 8501 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8502 template <typename Dispatch> 8503 VULKAN_HPP_INLINE void pushDescriptorSet(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,Dispatch const & d) const8504 CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 8505 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 8506 uint32_t set, 8507 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 8508 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8509 { 8510 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8511 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8512 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function <vkCmdPushDescriptorSet> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 8513 # endif 8514 8515 d.vkCmdPushDescriptorSet( m_commandBuffer, 8516 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 8517 static_cast<VkPipelineLayout>( layout ), 8518 set, 8519 descriptorWrites.size(), 8520 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); 8521 } 8522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8523 8524 template <typename Dispatch> pushDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,const void * pData,Dispatch const & d) const8525 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 8526 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 8527 uint32_t set, 8528 const void * pData, 8529 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8530 { 8531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8532 d.vkCmdPushDescriptorSetWithTemplate( 8533 m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData ); 8534 } 8535 8536 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8537 template <typename DataType, typename Dispatch> pushDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,DataType const & data,Dispatch const & d) const8538 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 8539 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 8540 uint32_t set, 8541 DataType const & data, 8542 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8543 { 8544 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8545 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8546 VULKAN_HPP_ASSERT( 8547 d.vkCmdPushDescriptorSetWithTemplate && 8548 "Function <vkCmdPushDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 8549 # endif 8550 8551 d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer, 8552 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 8553 static_cast<VkPipelineLayout>( layout ), 8554 set, 8555 reinterpret_cast<const void *>( &data ) ); 8556 } 8557 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8558 8559 template <typename Dispatch> setRenderingAttachmentLocations(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo,Dispatch const & d) const8560 VULKAN_HPP_INLINE void 
CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, 8561 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8562 { 8563 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8564 d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) ); 8565 } 8566 8567 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8568 template <typename Dispatch> setRenderingAttachmentLocations(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo,Dispatch const & d) const8569 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, 8570 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8571 { 8572 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8573 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8574 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations && 8575 "Function <vkCmdSetRenderingAttachmentLocations> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 8576 # endif 8577 8578 d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) ); 8579 } 8580 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8581 8582 template <typename Dispatch> 8583 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndices(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo,Dispatch const & d) const8584 CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, 8585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8586 { 8587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8588 d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( 
pInputAttachmentIndexInfo ) ); 8589 } 8590 8591 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8592 template <typename Dispatch> 8593 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndices(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo,Dispatch const & d) const8594 CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, 8595 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8596 { 8597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8599 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices && 8600 "Function <vkCmdSetRenderingInputAttachmentIndices> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 8601 # endif 8602 8603 d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) ); 8604 } 8605 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8606 8607 template <typename Dispatch> bindDescriptorSets2(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo,Dispatch const & d) const8608 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, 8609 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8610 { 8611 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8612 d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) ); 8613 } 8614 8615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8616 template <typename Dispatch> bindDescriptorSets2(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo,Dispatch const & d) const8617 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, 8618 Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT 8619 { 8620 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8621 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8622 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function <vkCmdBindDescriptorSets2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 8623 # endif 8624 8625 d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) ); 8626 } 8627 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8628 8629 template <typename Dispatch> pushConstants2(const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo,Dispatch const & d) const8630 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, 8631 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8632 { 8633 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8634 d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) ); 8635 } 8636 8637 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8638 template <typename Dispatch> pushConstants2(const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo,Dispatch const & d) const8639 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, 8640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8641 { 8642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8644 VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function <vkCmdPushConstants2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 8645 # endif 8646 8647 d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) ); 8648 } 8649 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8650 8651 template <typename Dispatch> pushDescriptorSet2(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * 
pPushDescriptorSetInfo,Dispatch const & d) const8652 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, 8653 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8654 { 8655 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8656 d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) ); 8657 } 8658 8659 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8660 template <typename Dispatch> pushDescriptorSet2(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo,Dispatch const & d) const8661 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, 8662 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8663 { 8664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8665 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8666 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function <vkCmdPushDescriptorSet2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 8667 # endif 8668 8669 d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) ); 8670 } 8671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8672 8673 template <typename Dispatch> 8674 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo,Dispatch const & d) const8675 CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, 8676 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8677 { 8678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8679 d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, 8680 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) ); 
8681 } 8682 8683 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8684 template <typename Dispatch> 8685 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo,Dispatch const & d) const8686 CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, 8687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8688 { 8689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8690 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8691 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 && 8692 "Function <vkCmdPushDescriptorSetWithTemplate2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 8693 # endif 8694 8695 d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, 8696 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) ); 8697 } 8698 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8699 8700 template <typename Dispatch> copyMemoryToImage(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo,Dispatch const & d) const8701 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, 8702 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8703 { 8704 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8705 return static_cast<Result>( d.vkCopyMemoryToImage( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) ); 8706 } 8707 8708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8709 template <typename Dispatch> 8710 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyMemoryToImage(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo,Dispatch const & d) const8711 Device::copyMemoryToImage( const 
VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const 8712 { 8713 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8714 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8715 VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function <vkCopyMemoryToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 8716 # endif 8717 8718 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8719 d.vkCopyMemoryToImage( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( ©MemoryToImageInfo ) ) ); 8720 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); 8721 8722 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8723 } 8724 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8725 8726 template <typename Dispatch> copyImageToMemory(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo,Dispatch const & d) const8727 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, 8728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8729 { 8730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8731 return static_cast<Result>( d.vkCopyImageToMemory( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) ); 8732 } 8733 8734 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8735 template <typename Dispatch> 8736 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToMemory(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo,Dispatch const & d) const8737 Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const 8738 { 8739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8740 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) 8741 VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function <vkCopyImageToMemory> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 8742 # endif 8743 8744 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8745 d.vkCopyImageToMemory( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( ©ImageToMemoryInfo ) ) ); 8746 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); 8747 8748 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8749 } 8750 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8751 8752 template <typename Dispatch> copyImageToImage(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo,Dispatch const & d) const8753 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, 8754 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8755 { 8756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8757 return static_cast<Result>( d.vkCopyImageToImage( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) ); 8758 } 8759 8760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8761 template <typename Dispatch> 8762 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToImage(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo,Dispatch const & d) const8763 Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const 8764 { 8765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8767 VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function <vkCopyImageToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 8768 # endif 8769 8770 VULKAN_HPP_NAMESPACE::Result result = 8771 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyImageToImage( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( ©ImageToImageInfo ) ) ); 8772 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); 8773 8774 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8775 } 8776 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8777 8778 template <typename Dispatch> transitionImageLayout(uint32_t transitionCount,const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions,Dispatch const & d) const8779 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount, 8780 const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, 8781 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8782 { 8783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8784 return static_cast<Result>( 8785 d.vkTransitionImageLayout( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) ); 8786 } 8787 8788 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8789 template <typename Dispatch> 8790 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type transitionImageLayout(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions,Dispatch const & d) const8791 Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions, 8792 Dispatch const & d ) const 8793 { 8794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8796 VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function <vkTransitionImageLayout> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 8797 # endif 8798 8799 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8800 
d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) ); 8801 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); 8802 8803 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8804 } 8805 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8806 8807 //=== VK_KHR_surface === 8808 8809 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8810 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8811 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8812 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8813 { 8814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8815 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8816 } 8817 8818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8819 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8820 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8821 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8822 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8823 { 8824 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8825 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8826 VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); 8827 # endif 8828 8829 d.vkDestroySurfaceKHR( m_instance, 8830 static_cast<VkSurfaceKHR>( surface ), 8831 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator 
) ) ); 8832 } 8833 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8834 8835 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8836 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8837 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8838 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8839 { 8840 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8841 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8842 } 8843 8844 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8845 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8846 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8847 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8848 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8849 { 8850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8851 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8852 VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); 8853 # endif 8854 8855 d.vkDestroySurfaceKHR( m_instance, 8856 static_cast<VkSurfaceKHR>( surface ), 8857 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8858 } 8859 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8860 8861 template <typename Dispatch> getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::Bool32 * pSupported,Dispatch const & d) const8862 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, 8863 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8864 VULKAN_HPP_NAMESPACE::Bool32 * pSupported, 8865 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8866 { 8867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8868 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 8869 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); 8870 } 8871 8872 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8873 template <typename Dispatch> 8874 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8875 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8876 { 8877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8879 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" ); 8880 # endif 8881 8882 VULKAN_HPP_NAMESPACE::Bool32 supported; 8883 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 8884 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) ); 8885 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); 8886 8887 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); 8888 } 8889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8890 8891 template <typename Dispatch> getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,Dispatch const & d) const8892 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8893 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, 8894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8895 { 8896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8897 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 8898 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); 8899 } 8900 8901 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8902 template <typename Dispatch> 8903 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8904 PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8905 { 8906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8908 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" ); 8909 # endif 8910 8911 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; 8912 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 8913 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) ); 8914 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); 8915 8916 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 8917 } 8918 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8919 8920 template <typename Dispatch> getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * 
pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,Dispatch const & d) const8921 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8922 uint32_t * pSurfaceFormatCount, 8923 VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, 8924 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8925 { 8926 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8927 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8928 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); 8929 } 8930 8931 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8932 template <typename SurfaceFormatKHRAllocator, typename Dispatch> 8933 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8934 PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8935 { 8936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8937 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8938 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); 8939 # endif 8940 8941 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats; 8942 uint32_t surfaceFormatCount; 8943 VULKAN_HPP_NAMESPACE::Result result; 8944 do 8945 { 8946 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8947 d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 8948 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 8949 { 8950 surfaceFormats.resize( surfaceFormatCount ); 8951 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8952 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 8953 } 8954 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8955 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 8956 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 8957 if ( surfaceFormatCount < surfaceFormats.size() ) 8958 { 8959 surfaceFormats.resize( surfaceFormatCount ); 8960 } 8961 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 8962 } 8963 8964 template <typename SurfaceFormatKHRAllocator, 8965 typename Dispatch, 8966 typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type> 8967 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,Dispatch const & d) const8968 PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8969 SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, 8970 Dispatch const & d ) const 8971 { 8972 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8973 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8974 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); 8975 # endif 8976 8977 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator ); 8978 uint32_t surfaceFormatCount; 8979 VULKAN_HPP_NAMESPACE::Result result; 8980 do 8981 { 8982 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 8983 d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 8984 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 8985 { 8986 surfaceFormats.resize( surfaceFormatCount ); 8987 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8988 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 8989 } 8990 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8991 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 8992 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 8993 if ( surfaceFormatCount < surfaceFormats.size() ) 8994 { 8995 surfaceFormats.resize( surfaceFormatCount ); 8996 } 8997 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 8998 } 8999 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9000 9001 template <typename Dispatch> getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const9002 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9003 uint32_t * pPresentModeCount, 9004 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 9005 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9006 { 9007 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9008 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 9009 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 9010 } 9011 9012 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9013 template <typename 
PresentModeKHRAllocator, typename Dispatch> 9014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const9015 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 9016 { 9017 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9018 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9019 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); 9020 # endif 9021 9022 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 9023 uint32_t presentModeCount; 9024 VULKAN_HPP_NAMESPACE::Result result; 9025 do 9026 { 9027 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9028 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 9029 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 9030 { 9031 presentModes.resize( presentModeCount ); 9032 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 9033 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 9034 } 9035 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9036 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 9037 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 9038 if ( presentModeCount < presentModes.size() ) 9039 { 9040 presentModes.resize( presentModeCount ); 9041 } 9042 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 9043 } 9044 9045 template <typename 
PresentModeKHRAllocator, 9046 typename Dispatch, 9047 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 9048 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const9049 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9050 PresentModeKHRAllocator & presentModeKHRAllocator, 9051 Dispatch const & d ) const 9052 { 9053 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9054 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9055 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); 9056 # endif 9057 9058 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 9059 uint32_t presentModeCount; 9060 VULKAN_HPP_NAMESPACE::Result result; 9061 do 9062 { 9063 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9064 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 9065 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 9066 { 9067 presentModes.resize( presentModeCount ); 9068 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 9069 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 9070 } 9071 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9072 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 9073 VULKAN_HPP_ASSERT( 
presentModeCount <= presentModes.size() ); 9074 if ( presentModeCount < presentModes.size() ) 9075 { 9076 presentModes.resize( presentModeCount ); 9077 } 9078 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 9079 } 9080 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9081 9082 //=== VK_KHR_swapchain === 9083 9084 template <typename Dispatch> createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,Dispatch const & d) const9085 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, 9086 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9087 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, 9088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9089 { 9090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9091 return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, 9092 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), 9093 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9094 reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); 9095 } 9096 9097 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9098 template <typename Dispatch> 9099 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9100 Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 9101 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9102 Dispatch const & d ) const 9103 { 9104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9105 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9106 
VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 9107 # endif 9108 9109 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 9110 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9111 d.vkCreateSwapchainKHR( m_device, 9112 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 9113 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9114 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 9115 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); 9116 9117 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); 9118 } 9119 9120 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9121 template <typename Dispatch> 9122 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9123 Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 9124 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9125 Dispatch const & d ) const 9126 { 9127 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9128 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9129 VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 9130 # endif 9131 9132 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 9133 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9134 d.vkCreateSwapchainKHR( m_device, 9135 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 9136 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9137 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 9138 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); 9139 9140 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 9141 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 9142 } 9143 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9145 9146 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9147 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9148 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9149 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9150 { 9151 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9152 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9153 } 9154 9155 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9156 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9157 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9158 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9159 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9160 { 9161 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9162 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9163 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 9164 # endif 9165 9166 d.vkDestroySwapchainKHR( m_device, 9167 
static_cast<VkSwapchainKHR>( swapchain ), 9168 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9169 } 9170 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9171 9172 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9173 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9174 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9175 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9176 { 9177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9178 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9179 } 9180 9181 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9182 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9183 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9184 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9186 { 9187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9189 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 9190 # endif 9191 9192 d.vkDestroySwapchainKHR( m_device, 9193 static_cast<VkSwapchainKHR>( swapchain ), 9194 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9195 } 9196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9197 9198 template <typename Dispatch> getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * 
pSwapchainImageCount,VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,Dispatch const & d) const9199 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9200 uint32_t * pSwapchainImageCount, 9201 VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, 9202 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9203 { 9204 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9205 return static_cast<Result>( 9206 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); 9207 } 9208 9209 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9210 template <typename ImageAllocator, typename Dispatch> 9211 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const9212 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 9213 { 9214 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9215 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9216 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 9217 # endif 9218 9219 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages; 9220 uint32_t swapchainImageCount; 9221 VULKAN_HPP_NAMESPACE::Result result; 9222 do 9223 { 9224 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9225 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 9226 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 9227 { 9228 swapchainImages.resize( swapchainImageCount ); 9229 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 9230 m_device, static_cast<VkSwapchainKHR>( swapchain ), 
&swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 9231 } 9232 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9233 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 9234 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 9235 if ( swapchainImageCount < swapchainImages.size() ) 9236 { 9237 swapchainImages.resize( swapchainImageCount ); 9238 } 9239 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 9240 } 9241 9242 template <typename ImageAllocator, 9243 typename Dispatch, 9244 typename std::enable_if<std::is_same<typename ImageAllocator::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type> 9245 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,ImageAllocator & imageAllocator,Dispatch const & d) const9246 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const 9247 { 9248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9249 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9250 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 9251 # endif 9252 9253 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator ); 9254 uint32_t swapchainImageCount; 9255 VULKAN_HPP_NAMESPACE::Result result; 9256 do 9257 { 9258 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9259 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 9260 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 9261 { 9262 swapchainImages.resize( swapchainImageCount ); 9263 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 9264 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 9265 } 9266 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9267 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 9268 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 9269 if ( swapchainImageCount < swapchainImages.size() ) 9270 { 9271 swapchainImages.resize( swapchainImageCount ); 9272 } 9273 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 9274 } 9275 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9276 9277 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,uint32_t * pImageIndex,Dispatch const & d) const9278 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9279 uint64_t timeout, 9280 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 9281 VULKAN_HPP_NAMESPACE::Fence fence, 9282 uint32_t * pImageIndex, 9283 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9284 { 9285 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9286 return static_cast<Result>( d.vkAcquireNextImageKHR( 9287 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) ); 9288 } 9289 9290 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9291 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const9292 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9293 uint64_t timeout, 9294 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 9295 VULKAN_HPP_NAMESPACE::Fence fence, 9296 Dispatch const & d ) const 9297 { 9298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9300 VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" ); 9301 # endif 9302 9303 uint32_t imageIndex; 9304 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireNextImageKHR( 9305 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) ); 9306 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 9307 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", 9308 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 9309 VULKAN_HPP_NAMESPACE::Result::eTimeout, 9310 VULKAN_HPP_NAMESPACE::Result::eNotReady, 9311 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 9312 9313 return ResultValue<uint32_t>( result, std::move( imageIndex ) ); 9314 } 9315 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9316 9317 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,Dispatch const & d) const9318 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, 9319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9320 { 9321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9322 return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); 9323 } 9324 9325 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9326 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,Dispatch const & d) const9327 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const 
VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, 9328 Dispatch const & d ) const 9329 { 9330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9332 VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" ); 9333 # endif 9334 9335 VULKAN_HPP_NAMESPACE::Result result = 9336 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) ); 9337 VULKAN_HPP_NAMESPACE::detail::resultCheck( 9338 result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 9339 9340 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 9341 } 9342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9343 9344 template <typename Dispatch> getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,Dispatch const & d) const9345 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( 9346 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9347 { 9348 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9349 return static_cast<Result>( 9350 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); 9351 } 9352 9353 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9354 template <typename Dispatch> 9355 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const & d) const9356 Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const 9357 { 9358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9359 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9360 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && 9361 "Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9362 # endif 9363 9364 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; 9365 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9366 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) ); 9367 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); 9368 9369 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); 9370 } 9371 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9372 9373 template <typename Dispatch> getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const9374 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9375 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 9376 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9377 { 9378 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9379 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 9380 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 9381 } 9382 9383 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9384 template <typename Dispatch> 9385 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const9386 Device::getGroupSurfacePresentModesKHR( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 9387 { 9388 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9389 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9390 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && 9391 "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9392 # endif 9393 9394 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 9395 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 9396 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 9397 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); 9398 9399 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); 9400 } 9401 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9402 9403 template <typename Dispatch> getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const9404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9405 uint32_t * pRectCount, 9406 VULKAN_HPP_NAMESPACE::Rect2D * pRects, 9407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9408 { 9409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9410 return static_cast<Result>( 9411 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); 9412 } 9413 9414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9415 template <typename Rect2DAllocator, typename Dispatch> 9416 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type 
getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const9417 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 9418 { 9419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9420 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9421 VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && 9422 "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9423 # endif 9424 9425 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects; 9426 uint32_t rectCount; 9427 VULKAN_HPP_NAMESPACE::Result result; 9428 do 9429 { 9430 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9431 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 9432 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 9433 { 9434 rects.resize( rectCount ); 9435 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 9436 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 9437 } 9438 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9439 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 9440 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 9441 if ( rectCount < rects.size() ) 9442 { 9443 rects.resize( rectCount ); 9444 } 9445 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); 9446 } 9447 9448 template <typename Rect2DAllocator, 9449 typename Dispatch, 9450 typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type> 9451 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type 
getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Rect2DAllocator & rect2DAllocator,Dispatch const & d) const9452 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const 9453 { 9454 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9455 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9456 VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && 9457 "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9458 # endif 9459 9460 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator ); 9461 uint32_t rectCount; 9462 VULKAN_HPP_NAMESPACE::Result result; 9463 do 9464 { 9465 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9466 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 9467 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 9468 { 9469 rects.resize( rectCount ); 9470 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 9471 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 9472 } 9473 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9474 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 9475 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 9476 if ( rectCount < rects.size() ) 9477 { 9478 rects.resize( rectCount ); 9479 } 9480 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); 9481 } 9482 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9483 9484 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex,Dispatch const & d) const9485 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, 9486 uint32_t * pImageIndex, 9487 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9488 { 9489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9490 return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); 9491 } 9492 9493 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9494 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,Dispatch const & d) const9495 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, 9496 Dispatch const & d ) const 9497 { 9498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9499 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9500 VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9501 # endif 9502 9503 uint32_t imageIndex; 9504 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9505 d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) ); 9506 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 9507 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", 9508 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 9509 VULKAN_HPP_NAMESPACE::Result::eTimeout, 9510 VULKAN_HPP_NAMESPACE::Result::eNotReady, 9511 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 9512 9513 return ResultValue<uint32_t>( result, std::move( imageIndex ) ); 9514 } 9515 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9516 9517 //=== VK_KHR_display === 9518 9519 template <typename Dispatch> getDisplayPropertiesKHR(uint32_t * 
pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,Dispatch const & d) const9520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, 9521 VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, 9522 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9523 { 9524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9525 return static_cast<Result>( 9526 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); 9527 } 9528 9529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9530 template <typename DisplayPropertiesKHRAllocator, typename Dispatch> 9531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(Dispatch const & d) const9532 PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const 9533 { 9534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9536 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); 9537 # endif 9538 9539 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; 9540 uint32_t propertyCount; 9541 VULKAN_HPP_NAMESPACE::Result result; 9542 do 9543 { 9544 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9545 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9546 { 9547 properties.resize( propertyCount ); 9548 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9549 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 9550 } 9551 } while 
( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9552 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 9553 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9554 if ( propertyCount < properties.size() ) 9555 { 9556 properties.resize( propertyCount ); 9557 } 9558 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9559 } 9560 9561 template < 9562 typename DisplayPropertiesKHRAllocator, 9563 typename Dispatch, 9564 typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type> 9565 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,Dispatch const & d) const9566 PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const 9567 { 9568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9570 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); 9571 # endif 9572 9573 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); 9574 uint32_t propertyCount; 9575 VULKAN_HPP_NAMESPACE::Result result; 9576 do 9577 { 9578 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9579 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9580 { 9581 properties.resize( propertyCount ); 9582 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9583 
d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 9584 } 9585 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9586 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 9587 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9588 if ( propertyCount < properties.size() ) 9589 { 9590 properties.resize( propertyCount ); 9591 } 9592 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9593 } 9594 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9595 9596 template <typename Dispatch> getDisplayPlanePropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,Dispatch const & d) const9597 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, 9598 VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, 9599 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9600 { 9601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9602 return static_cast<Result>( 9603 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); 9604 } 9605 9606 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9607 template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch> 9608 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9609 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(Dispatch const & d) const9610 PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const 9611 { 9612 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9613 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9614 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); 9615 # endif 9616 9617 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties; 9618 uint32_t propertyCount; 9619 VULKAN_HPP_NAMESPACE::Result result; 9620 do 9621 { 9622 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9623 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9624 { 9625 properties.resize( propertyCount ); 9626 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 9627 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 9628 } 9629 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9630 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 9631 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9632 if ( propertyCount < properties.size() ) 9633 { 9634 properties.resize( propertyCount ); 9635 } 9636 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9637 } 9638 9639 template < 9640 typename DisplayPlanePropertiesKHRAllocator, 9641 typename Dispatch, 9642 typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, 9643 int>::type> 9644 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9645 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,Dispatch const & d) const9646 PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & 
displayPlanePropertiesKHRAllocator, Dispatch const & d ) const 9647 { 9648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9650 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); 9651 # endif 9652 9653 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator ); 9654 uint32_t propertyCount; 9655 VULKAN_HPP_NAMESPACE::Result result; 9656 do 9657 { 9658 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9659 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9660 { 9661 properties.resize( propertyCount ); 9662 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 9663 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 9664 } 9665 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9666 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 9667 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9668 if ( propertyCount < properties.size() ) 9669 { 9670 properties.resize( propertyCount ); 9671 } 9672 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9673 } 9674 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9675 9676 template <typename Dispatch> getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,uint32_t * pDisplayCount,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,Dispatch const & d) const9677 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, 9678 uint32_t * pDisplayCount, 9679 
VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, 9680 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9681 { 9682 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9683 return static_cast<Result>( 9684 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); 9685 } 9686 9687 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9688 template <typename DisplayKHRAllocator, typename Dispatch> 9689 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,Dispatch const & d) const9690 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const 9691 { 9692 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9694 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); 9695 # endif 9696 9697 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays; 9698 uint32_t displayCount; 9699 VULKAN_HPP_NAMESPACE::Result result; 9700 do 9701 { 9702 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 9703 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 9704 { 9705 displays.resize( displayCount ); 9706 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9707 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 9708 } 9709 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9710 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 9711 VULKAN_HPP_ASSERT( 
displayCount <= displays.size() ); 9712 if ( displayCount < displays.size() ) 9713 { 9714 displays.resize( displayCount ); 9715 } 9716 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); 9717 } 9718 9719 template <typename DisplayKHRAllocator, 9720 typename Dispatch, 9721 typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type> 9722 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,DisplayKHRAllocator & displayKHRAllocator,Dispatch const & d) const9723 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const 9724 { 9725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9726 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9727 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); 9728 # endif 9729 9730 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); 9731 uint32_t displayCount; 9732 VULKAN_HPP_NAMESPACE::Result result; 9733 do 9734 { 9735 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 9736 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 9737 { 9738 displays.resize( displayCount ); 9739 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9740 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 9741 } 9742 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9743 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 9744 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 9745 if ( displayCount < displays.size() ) 9746 { 9747 displays.resize( displayCount ); 9748 } 9749 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); 9750 } 9751 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9752 9753 template <typename Dispatch> getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,Dispatch const & d) const9754 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9755 uint32_t * pPropertyCount, 9756 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, 9757 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9758 { 9759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9760 return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( 9761 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); 9762 } 9763 9764 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9765 template <typename DisplayModePropertiesKHRAllocator, typename Dispatch> 9766 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9767 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const9768 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 9769 { 9770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9771 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9772 VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); 9773 # endif 9774 9775 
std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; 9776 uint32_t propertyCount; 9777 VULKAN_HPP_NAMESPACE::Result result; 9778 do 9779 { 9780 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9781 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 9782 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9783 { 9784 properties.resize( propertyCount ); 9785 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 9786 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 9787 } 9788 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9789 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 9790 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9791 if ( propertyCount < properties.size() ) 9792 { 9793 properties.resize( propertyCount ); 9794 } 9795 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9796 } 9797 9798 template <typename DisplayModePropertiesKHRAllocator, 9799 typename Dispatch, 9800 typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, 9801 int>::type> 9802 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9803 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,Dispatch const & d) const9804 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9805 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, 9806 
Dispatch const & d ) const 9807 { 9808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9809 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9810 VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); 9811 # endif 9812 9813 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator ); 9814 uint32_t propertyCount; 9815 VULKAN_HPP_NAMESPACE::Result result; 9816 do 9817 { 9818 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9819 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 9820 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9821 { 9822 properties.resize( propertyCount ); 9823 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 9824 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 9825 } 9826 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9827 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 9828 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9829 if ( propertyCount < properties.size() ) 9830 { 9831 properties.resize( propertyCount ); 9832 } 9833 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9834 } 9835 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9836 9837 template <typename Dispatch> createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,Dispatch const & d) const9838 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9839 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, 9840 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9841 VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, 9842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9843 { 9844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9845 return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, 9846 static_cast<VkDisplayKHR>( display ), 9847 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), 9848 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9849 reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); 9850 } 9851 9852 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9853 template <typename Dispatch> 9854 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9855 PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9856 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, 9857 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9858 Dispatch const & d ) const 9859 { 9860 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9861 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9862 VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" ); 9863 # endif 9864 9865 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 9866 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9867 d.vkCreateDisplayModeKHR( m_physicalDevice, 9868 static_cast<VkDisplayKHR>( display ), 9869 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 9870 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9871 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 9872 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); 9873 9874 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); 9875 } 9876 9877 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9878 template <typename Dispatch> 9879 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9880 PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9881 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, 9882 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9883 Dispatch const & d ) const 9884 { 9885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9886 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9887 VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" ); 9888 # endif 9889 9890 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 9891 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9892 d.vkCreateDisplayModeKHR( m_physicalDevice, 9893 static_cast<VkDisplayKHR>( display ), 9894 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 9895 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9896 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 9897 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::createDisplayModeKHRUnique" ); 9898 9899 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 9900 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) ); 9901 } 9902 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9903 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9904 9905 template <typename Dispatch> 9906 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t planeIndex,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,Dispatch const & d) const9907 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, 9908 uint32_t planeIndex, 9909 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, 9910 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9911 { 9912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9913 return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( 9914 m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); 9915 } 9916 9917 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9918 template <typename Dispatch> 9919 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t planeIndex,Dispatch const & d) const9920 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const 9921 { 9922 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9923 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9924 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" ); 9925 # endif 9926 9927 
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; 9928 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilitiesKHR( 9929 m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) ); 9930 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); 9931 9932 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); 9933 } 9934 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9935 9936 template <typename Dispatch> createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const9937 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, 9938 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9939 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 9940 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9941 { 9942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9943 return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, 9944 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), 9945 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9946 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 9947 } 9948 9949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9950 template <typename Dispatch> 9951 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9952 
Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, 9953 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9954 Dispatch const & d ) const 9955 { 9956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9957 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9958 VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" ); 9959 # endif 9960 9961 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9962 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR( 9963 m_instance, 9964 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 9965 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9966 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 9967 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); 9968 9969 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 9970 } 9971 9972 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9973 template <typename Dispatch> 9974 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9975 Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, 9976 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9977 Dispatch const & d ) const 9978 { 9979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9980 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9981 VULKAN_HPP_ASSERT( 
d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" ); 9982 # endif 9983 9984 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9985 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR( 9986 m_instance, 9987 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 9988 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9989 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 9990 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); 9991 9992 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 9993 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 9994 } 9995 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9996 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9997 9998 //=== VK_KHR_display_swapchain === 9999 10000 template <typename Dispatch> createSharedSwapchainsKHR(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,Dispatch const & d) const10001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, 10002 const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, 10003 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10004 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 10005 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10006 { 10007 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10008 return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 10009 swapchainCount, 10010 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), 10011 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10012 reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) ); 10013 } 10014 10015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10016 template <typename SwapchainKHRAllocator, typename Dispatch> 10017 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10018 Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 10019 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10020 Dispatch const & d ) const 10021 { 10022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10023 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10024 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10025 # endif 10026 10027 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() ); 10028 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10029 m_device, 10030 createInfos.size(), 10031 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 10032 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10033 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 10034 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 10035 10036 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); 10037 } 10038 10039 template <typename 
SwapchainKHRAllocator, 10040 typename Dispatch, 10041 typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type> 10042 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const10043 Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 10044 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10045 SwapchainKHRAllocator & swapchainKHRAllocator, 10046 Dispatch const & d ) const 10047 { 10048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10050 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10051 # endif 10052 10053 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator ); 10054 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10055 m_device, 10056 createInfos.size(), 10057 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 10058 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10059 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 10060 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 10061 10062 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 
std::move( swapchains ) ); 10063 } 10064 10065 template <typename Dispatch> 10066 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSharedSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10067 Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 10068 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10069 Dispatch const & d ) const 10070 { 10071 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10072 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10073 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10074 # endif 10075 10076 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 10077 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10078 m_device, 10079 1, 10080 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 10081 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10082 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 10083 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); 10084 10085 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); 10086 } 10087 10088 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10089 template <typename Dispatch, typename SwapchainKHRAllocator> 10090 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10091 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & 
createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10092 Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 10093 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10094 Dispatch const & d ) const 10095 { 10096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10097 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10098 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10099 # endif 10100 10101 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); 10102 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10103 m_device, 10104 createInfos.size(), 10105 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 10106 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10107 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 10108 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 10109 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains; 10110 uniqueSwapchains.reserve( createInfos.size() ); 10111 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 10112 for ( auto const & swapchain : swapchains ) 10113 { 10114 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); 10115 } 10116 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); 10117 } 10118 10119 template <typename Dispatch, 10120 typename SwapchainKHRAllocator, 10121 typename std::enable_if<std::is_same<typename 
SwapchainKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value, 10122 int>::type> 10123 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10124 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const10125 Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 10126 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10127 SwapchainKHRAllocator & swapchainKHRAllocator, 10128 Dispatch const & d ) const 10129 { 10130 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10131 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10132 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10133 # endif 10134 10135 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); 10136 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10137 m_device, 10138 createInfos.size(), 10139 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 10140 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10141 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 10142 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 10143 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); 10144 
uniqueSwapchains.reserve( createInfos.size() ); 10145 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 10146 for ( auto const & swapchain : swapchains ) 10147 { 10148 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); 10149 } 10150 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); 10151 } 10152 10153 template <typename Dispatch> 10154 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10155 Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 10156 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10157 Dispatch const & d ) const 10158 { 10159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10161 VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); 10162 # endif 10163 10164 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 10165 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 10166 m_device, 10167 1, 10168 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 10169 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10170 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 10171 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); 10172 10173 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10174 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, 
Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 10175 } 10176 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10177 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10178 10179 #if defined( VK_USE_PLATFORM_XLIB_KHR ) 10180 //=== VK_KHR_xlib_surface === 10181 10182 template <typename Dispatch> createXlibSurfaceKHR(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10183 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, 10184 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10185 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10186 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10187 { 10188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10189 return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, 10190 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), 10191 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10192 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10193 } 10194 10195 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10196 template <typename Dispatch> 10197 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10198 Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, 10199 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10200 Dispatch const & d ) const 10201 { 10202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10203 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10204 VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && 
"Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" ); 10205 # endif 10206 10207 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10208 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10209 d.vkCreateXlibSurfaceKHR( m_instance, 10210 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 10211 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10212 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10213 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); 10214 10215 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10216 } 10217 10218 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10219 template <typename Dispatch> 10220 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10221 Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, 10222 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10223 Dispatch const & d ) const 10224 { 10225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10226 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10227 VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" ); 10228 # endif 10229 10230 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10231 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10232 d.vkCreateXlibSurfaceKHR( m_instance, 10233 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 10234 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10235 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10236 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); 10237 10238 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10239 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10240 } 10241 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10242 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10243 10244 template <typename Dispatch> 10245 VULKAN_HPP_INLINE Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display * dpy,VisualID visualID,Dispatch const & d) const10246 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10247 { 10248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10249 return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); 10250 } 10251 10252 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10253 template <typename Dispatch> 10254 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display & dpy,VisualID visualID,Dispatch const & d) const10255 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10256 { 10257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10259 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR && 10260 "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" ); 10261 # endif 10262 10263 VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, 
&dpy, visualID ); 10264 10265 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 10266 } 10267 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10268 #endif /*VK_USE_PLATFORM_XLIB_KHR*/ 10269 10270 #if defined( VK_USE_PLATFORM_XCB_KHR ) 10271 //=== VK_KHR_xcb_surface === 10272 10273 template <typename Dispatch> createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10274 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, 10275 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10276 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10277 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10278 { 10279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10280 return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, 10281 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), 10282 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10283 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10284 } 10285 10286 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10287 template <typename Dispatch> 10288 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10289 Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 10290 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10291 Dispatch const & d ) const 10292 { 10293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10294 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10295 VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && 
"Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); 10296 # endif 10297 10298 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10299 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10300 d.vkCreateXcbSurfaceKHR( m_instance, 10301 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 10302 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10303 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10304 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); 10305 10306 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10307 } 10308 10309 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10310 template <typename Dispatch> 10311 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10312 Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 10313 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10314 Dispatch const & d ) const 10315 { 10316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10317 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10318 VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); 10319 # endif 10320 10321 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10322 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10323 d.vkCreateXcbSurfaceKHR( m_instance, 10324 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 10325 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10326 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10327 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); 10328 10329 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10330 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10331 } 10332 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10333 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10334 10335 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id,Dispatch const & d) const10336 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 10337 xcb_connection_t * connection, 10338 xcb_visualid_t visual_id, 10339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10340 { 10341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10342 return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); 10343 } 10344 10345 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10346 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t & connection,xcb_visualid_t visual_id,Dispatch const & d) const10347 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 10348 xcb_connection_t & connection, 10349 xcb_visualid_t visual_id, 10350 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10351 { 10352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10353 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10354 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR && 10355 "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" ); 10356 # endif 
    // Tail of PhysicalDevice::getXcbPresentationSupportKHR (enhanced overload): call through
    // the dispatcher and cast the raw VkBool32 to the vk::Bool32 wrapper type.
    VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  //=== VK_KHR_wayland_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a mismatch between the dispatcher's header version and this header.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes references, checks the result via detail::resultCheck, and
  // returns the created SurfaceKHR packaged by detail::createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same call, but the surface is returned wrapped in a UniqueHandle
  // whose deleter (ObjectDestroy) destroys it with the same allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: queries Wayland presentation support for a queue family; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
                                                                             struct wl_display * display,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: identical query, but takes the display by reference instead of pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
                       "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" );
#  endif

    VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_KHR_android_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created SurfaceKHR after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createAndroidSurfaceKHR( const
                                       VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    // Guard against a mismatch between the dispatcher's header version and this header.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: surface is wrapped in a UniqueHandle with an ObjectDestroy deleter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_win32_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
                                                           reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created SurfaceKHR after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: surface is wrapped in a UniqueHandle with an ObjectDestroy deleter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Win32 presentation support needs no window-system object; only the queue family index.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
  }
#endif
/*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_debug_report ===

  // C-style overload: writes the created callback through pCallback and returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a mismatch between the dispatcher's header version and this header.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
                                                                  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created DebugReportCallbackEXT after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
    Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
#  endif

    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
      m_instance,
      reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: callback is wrapped in a UniqueHandle with an ObjectDestroy deleter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
    Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
#  endif

    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
      m_instance,
      reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style destroy: pointer allocator, returns nothing; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugReportCallbackEXT(
      m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy: takes an Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
#  endif

    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy(handle) overload dispatching to vkDestroyDebugReportCallbackEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugReportCallbackEXT(
      m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced generic destroy(handle) with an Optional allocator wrapper.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
#  endif

    // Tail of the enhanced Instance::destroy overload for DebugReportCallbackEXT.
    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: injects a message into the debug report stream; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                          uint64_t object,
                                                          size_t location,
                                                          int32_t messageCode,
                                                          const char * pLayerPrefix,
                                                          const char * pMessage,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a mismatch between the dispatcher's header version and this header.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                               object,
                               location,
                               messageCode,
                               pLayerPrefix,
                               pMessage );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: accepts std::string arguments and forwards their c_str() pointers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                          uint64_t object,
                                                          size_t location,
                                                          int32_t messageCode,
                                                          const std::string & layerPrefix,
                                                          const std::string & message,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" );
#  endif

    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                               object,
                               location,
                               messageCode,
                               layerPrefix.c_str(),
                               message.c_str() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_debug_marker ===

  // C-style overload: attaches a tag to an object; returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: result-checked void return.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: names an object for debugging tools; returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: result-checked void return.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: opens a debug marker region in the command buffer; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the marker info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" );
#  endif

    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Closes the current debug marker region; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() ==
                       VK_HEADER_VERSION );
    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
  }

  // C-style overload: inserts a single debug marker into the command buffer; never throws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a mismatch between the dispatcher's header version and this header.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the marker info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" );
#  endif

    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_video_queue ===

  // C-style overload: writes video capabilities through pCapabilities and returns the raw Result; never throws.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
                                                                                         VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the VideoCapabilitiesKHR value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
    PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
#  endif

    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) );
  }

  // StructureChain overload: fills the VideoCapabilitiesKHR element of a caller-chosen pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
#  endif

    StructureChain<X, Y, Z...> structureChain;
    // The driver writes into the chain's VideoCapabilitiesKHR element; its pNext links the rest of the chain.
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload of the two-call enumeration entry point for video format properties.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
                                                 uint32_t * pVideoFormatPropertyCount,
10923 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, 10924 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10925 { 10926 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10927 return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 10928 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), 10929 pVideoFormatPropertyCount, 10930 reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); 10931 } 10932 10933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10934 template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch> 10935 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10936 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,Dispatch const & d) const10937 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const 10938 { 10939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10940 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10941 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 10942 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 10943 # endif 10944 10945 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; 10946 uint32_t videoFormatPropertyCount; 10947 VULKAN_HPP_NAMESPACE::Result result; 10948 do 10949 { 10950 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 10951 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 10952 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
videoFormatPropertyCount ) 10953 { 10954 videoFormatProperties.resize( videoFormatPropertyCount ); 10955 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10956 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 10957 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 10958 &videoFormatPropertyCount, 10959 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 10960 } 10961 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 10962 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 10963 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 10964 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 10965 { 10966 videoFormatProperties.resize( videoFormatPropertyCount ); 10967 } 10968 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); 10969 } 10970 10971 template <typename VideoFormatPropertiesKHRAllocator, 10972 typename Dispatch, 10973 typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, 10974 int>::type> 10975 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10976 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,Dispatch const & d) const10977 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 10978 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, 10979 Dispatch const & d ) const 10980 { 10981 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10982 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10983 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 10984 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 10985 # endif 10986 10987 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator ); 10988 uint32_t videoFormatPropertyCount; 10989 VULKAN_HPP_NAMESPACE::Result result; 10990 do 10991 { 10992 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 10993 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 10994 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 10995 { 10996 videoFormatProperties.resize( videoFormatPropertyCount ); 10997 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10998 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 10999 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 11000 &videoFormatPropertyCount, 11001 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 11002 } 11003 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11004 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 11005 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 11006 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 11007 { 11008 videoFormatProperties.resize( videoFormatPropertyCount ); 11009 } 11010 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); 11011 } 11012 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11013 11014 template <typename Dispatch> createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * 
pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,Dispatch const & d) const11015 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, 11016 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11017 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, 11018 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11019 { 11020 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11021 return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, 11022 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), 11023 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11024 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); 11025 } 11026 11027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11028 template <typename Dispatch> 11029 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11030 Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 11031 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11032 Dispatch const & d ) const 11033 { 11034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11035 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11036 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11037 # endif 11038 11039 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 11040 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11041 d.vkCreateVideoSessionKHR( m_device, 11042 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 
11043 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11044 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 11045 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); 11046 11047 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); 11048 } 11049 11050 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11051 template <typename Dispatch> 11052 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11053 Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 11054 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11055 Dispatch const & d ) const 11056 { 11057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11059 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11060 # endif 11061 11062 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 11063 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11064 d.vkCreateVideoSessionKHR( m_device, 11065 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 11066 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11067 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 11068 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); 11069 11070 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11071 result, UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11072 } 11073 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11074 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11075 11076 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11077 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11078 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11080 { 11081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11082 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11083 } 11084 11085 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11086 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11087 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11088 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11090 { 11091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11092 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11093 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11094 # endif 11095 11096 d.vkDestroyVideoSessionKHR( 11097 m_device, 11098 static_cast<VkVideoSessionKHR>( videoSession ), 11099 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( 
allocator ) ) ); 11100 } 11101 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11102 11103 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11104 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11105 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11107 { 11108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11109 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11110 } 11111 11112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11113 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11114 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11115 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11117 { 11118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11120 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11121 # endif 11122 11123 d.vkDestroyVideoSessionKHR( 11124 m_device, 11125 static_cast<VkVideoSessionKHR>( videoSession ), 11126 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11127 } 11128 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11129 11130 template <typename Dispatch> 11131 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t * 
pMemoryRequirementsCount,VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,Dispatch const & d) const11132 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11133 uint32_t * pMemoryRequirementsCount, 11134 VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, 11135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11136 { 11137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11138 return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 11139 static_cast<VkVideoSessionKHR>( videoSession ), 11140 pMemoryRequirementsCount, 11141 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); 11142 } 11143 11144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11145 template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch> 11146 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11147 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Dispatch const & d) const11148 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const 11149 { 11150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11151 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11152 VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 11153 # endif 11154 11155 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; 11156 uint32_t memoryRequirementsCount; 11157 VULKAN_HPP_NAMESPACE::Result result; 11158 do 11159 { 11160 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11161 d.vkGetVideoSessionMemoryRequirementsKHR( 
m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 11162 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 11163 { 11164 memoryRequirements.resize( memoryRequirementsCount ); 11165 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11166 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 11167 static_cast<VkVideoSessionKHR>( videoSession ), 11168 &memoryRequirementsCount, 11169 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 11170 } 11171 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11172 11173 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 11174 if ( memoryRequirementsCount < memoryRequirements.size() ) 11175 { 11176 memoryRequirements.resize( memoryRequirementsCount ); 11177 } 11178 return memoryRequirements; 11179 } 11180 11181 template <typename VideoSessionMemoryRequirementsKHRAllocator, 11182 typename Dispatch, 11183 typename std::enable_if< 11184 std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, 11185 int>::type> 11186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11187 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,Dispatch const & d) const11188 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11189 VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, 11190 Dispatch const & d ) const 11191 { 11192 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11193 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11194 VULKAN_HPP_ASSERT( 
d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 11195 # endif 11196 11197 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( 11198 videoSessionMemoryRequirementsKHRAllocator ); 11199 uint32_t memoryRequirementsCount; 11200 VULKAN_HPP_NAMESPACE::Result result; 11201 do 11202 { 11203 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11204 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 11205 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 11206 { 11207 memoryRequirements.resize( memoryRequirementsCount ); 11208 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11209 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 11210 static_cast<VkVideoSessionKHR>( videoSession ), 11211 &memoryRequirementsCount, 11212 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 11213 } 11214 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11215 11216 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 11217 if ( memoryRequirementsCount < memoryRequirements.size() ) 11218 { 11219 memoryRequirements.resize( memoryRequirementsCount ); 11220 } 11221 return memoryRequirements; 11222 } 11223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11224 11225 template <typename Dispatch> 11226 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t bindSessionMemoryInfoCount,const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,Dispatch const & d) const11227 Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11228 uint32_t bindSessionMemoryInfoCount, 11229 const 
VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, 11230 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11231 { 11232 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11233 return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, 11234 static_cast<VkVideoSessionKHR>( videoSession ), 11235 bindSessionMemoryInfoCount, 11236 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); 11237 } 11238 11239 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11240 template <typename Dispatch> bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,Dispatch const & d) const11241 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( 11242 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11243 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, 11244 Dispatch const & d ) const 11245 { 11246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11247 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11248 VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" ); 11249 # endif 11250 11251 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11252 d.vkBindVideoSessionMemoryKHR( m_device, 11253 static_cast<VkVideoSessionKHR>( videoSession ), 11254 bindSessionMemoryInfos.size(), 11255 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) ); 11256 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); 11257 11258 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11259 } 11260 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11261 11262 template <typename Dispatch> 11263 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,Dispatch const & d) const11264 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, 11265 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11266 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, 11267 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11268 { 11269 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11270 return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, 11271 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), 11272 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11273 reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); 11274 } 11275 11276 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11277 template <typename Dispatch> 11278 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11279 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 11280 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11281 Dispatch const & d ) const 11282 { 11283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11284 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11285 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function 
<vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11286 # endif 11287 11288 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 11289 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 11290 m_device, 11291 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 11292 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11293 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 11294 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); 11295 11296 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); 11297 } 11298 11299 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11300 template <typename Dispatch> 11301 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11302 Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 11303 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11304 Dispatch const & d ) const 11305 { 11306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11307 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11308 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11309 # endif 11310 11311 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 11312 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 11313 m_device, 11314 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 11315 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11316 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 11317 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); 11318 11319 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 11320 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( 11321 videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11322 } 11323 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11324 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11325 11326 template <typename Dispatch> 11327 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,Dispatch const & d) const11328 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11329 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, 11330 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11331 { 11332 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11333 return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, 11334 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11335 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); 11336 } 11337 11338 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11339 template <typename Dispatch> 11340 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,Dispatch const & d) const11341 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11342 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, 11343 Dispatch const & d ) const 11344 { 11345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11346 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11347 VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11348 # endif 11349 11350 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11351 d.vkUpdateVideoSessionParametersKHR( m_device, 11352 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11353 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) ); 11354 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); 11355 11356 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11357 } 11358 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11359 11360 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11361 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11362 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11364 { 11365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11366 d.vkDestroyVideoSessionParametersKHR( 11367 m_device, static_cast<VkVideoSessionParametersKHR>( 
videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11368 } 11369 11370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11371 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11372 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11373 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11374 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11375 { 11376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11378 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11379 # endif 11380 11381 d.vkDestroyVideoSessionParametersKHR( 11382 m_device, 11383 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11384 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11385 } 11386 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11387 11388 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11389 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11390 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11391 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11392 { 11393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11394 d.vkDestroyVideoSessionParametersKHR( 11395 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator 
) ); 11396 } 11397 11398 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11399 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11400 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11401 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11402 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11403 { 11404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11405 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11406 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11407 # endif 11408 11409 d.vkDestroyVideoSessionParametersKHR( 11410 m_device, 11411 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11412 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11413 } 11414 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11415 11416 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,Dispatch const & d) const11417 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, 11418 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11419 { 11420 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11421 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); 11422 } 11423 11424 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11425 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,Dispatch const & d) const11426 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const 
VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, 11427 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11428 { 11429 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11430 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11431 VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11432 # endif 11433 11434 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) ); 11435 } 11436 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11437 11438 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,Dispatch const & d) const11439 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, 11440 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11441 { 11442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11443 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); 11444 } 11445 11446 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11447 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,Dispatch const & d) const11448 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, 11449 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11450 { 11451 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11452 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11453 VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11454 # endif 11455 11456 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); 11457 } 11458 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11459 11460 template <typename Dispatch> 
controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,Dispatch const & d) const11461 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, 11462 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11463 { 11464 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11465 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); 11466 } 11467 11468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11469 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,Dispatch const & d) const11470 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, 11471 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11472 { 11473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11474 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11475 VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11476 # endif 11477 11478 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); 11479 } 11480 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11481 11482 //=== VK_KHR_video_decode_queue === 11483 11484 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,Dispatch const & d) const11485 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, 11486 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11487 { 11488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11489 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); 11490 } 11491 11492 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11493 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,Dispatch const & d) const11494 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, 11495 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11496 { 11497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11498 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11499 VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" ); 11500 # endif 11501 11502 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); 11503 } 11504 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11505 11506 //=== VK_EXT_transform_feedback === 11507 11508 template <typename Dispatch> bindTransformFeedbackBuffersEXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,Dispatch const & d) const11509 VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 11510 uint32_t bindingCount, 11511 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 11512 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 11513 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 11514 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11515 { 11516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11517 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 11518 firstBinding, 11519 bindingCount, 11520 reinterpret_cast<const VkBuffer *>( pBuffers ), 11521 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 11522 reinterpret_cast<const VkDeviceSize *>( pSizes ) ); 11523 } 11524 11525 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11526 template <typename Dispatch> 11527 VULKAN_HPP_INLINE void bindTransformFeedbackBuffersEXT(uint32_t 
firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,Dispatch const & d) const11528 CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 11529 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 11530 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 11531 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 11532 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11533 { 11534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11536 VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" ); 11537 # endif 11538 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11539 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 11540 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 11541 # else 11542 if ( buffers.size() != offsets.size() ) 11543 { 11544 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); 11545 } 11546 if ( !sizes.empty() && buffers.size() != sizes.size() ) 11547 { 11548 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); 11549 } 11550 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11551 11552 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 11553 firstBinding, 11554 buffers.size(), 11555 reinterpret_cast<const VkBuffer *>( buffers.data() ), 11556 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 11557 reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); 11558 } 11559 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11560 11561 template <typename Dispatch> beginTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11562 VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11563 uint32_t counterBufferCount, 11564 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11565 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11566 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11567 { 11568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11569 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 11570 firstCounterBuffer, 11571 counterBufferCount, 11572 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11573 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 11574 } 11575 11576 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11577 template <typename Dispatch> 11578 VULKAN_HPP_INLINE void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11579 CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11580 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11581 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11582 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11583 { 11584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11585 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11586 VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11587 # endif 
11588 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11589 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 11590 # else 11591 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11592 { 11593 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11594 } 11595 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11596 11597 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 11598 firstCounterBuffer, 11599 counterBuffers.size(), 11600 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11601 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11602 } 11603 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11604 11605 template <typename Dispatch> endTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11606 VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11607 uint32_t counterBufferCount, 11608 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11609 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11610 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11611 { 11612 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11613 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 11614 firstCounterBuffer, 11615 counterBufferCount, 11616 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11617 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 11618 } 11619 11620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11621 template <typename Dispatch> 11622 VULKAN_HPP_INLINE void endTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & 
counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11623 CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11624 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11625 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11627 { 11628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11629 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11630 VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11631 # endif 11632 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11633 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 11634 # else 11635 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11636 { 11637 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11638 } 11639 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11640 11641 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 11642 firstCounterBuffer, 11643 counterBuffers.size(), 11644 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11645 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11646 } 11647 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11648 11649 template <typename Dispatch> beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,uint32_t index,Dispatch const & d) const11650 VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 11651 uint32_t query, 11652 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 11653 uint32_t index, 
11654 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11655 { 11656 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11657 d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); 11658 } 11659 11660 template <typename Dispatch> 11661 VULKAN_HPP_INLINE void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,uint32_t index,Dispatch const & d) const11662 CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11663 { 11664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11665 d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index ); 11666 } 11667 11668 template <typename Dispatch> drawIndirectByteCountEXT(uint32_t instanceCount,uint32_t firstInstance,VULKAN_HPP_NAMESPACE::Buffer counterBuffer,VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,uint32_t counterOffset,uint32_t vertexStride,Dispatch const & d) const11669 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, 11670 uint32_t firstInstance, 11671 VULKAN_HPP_NAMESPACE::Buffer counterBuffer, 11672 VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, 11673 uint32_t counterOffset, 11674 uint32_t vertexStride, 11675 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11676 { 11677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11678 d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, 11679 instanceCount, 11680 firstInstance, 11681 static_cast<VkBuffer>( counterBuffer ), 11682 static_cast<VkDeviceSize>( counterBufferOffset ), 11683 counterOffset, 11684 vertexStride ); 11685 } 11686 11687 //=== VK_NVX_binary_import === 11688 11689 template <typename Dispatch> createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,Dispatch const & d) const11690 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, 11691 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11692 VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, 11693 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11694 { 11695 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11696 return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, 11697 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), 11698 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11699 reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); 11700 } 11701 11702 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11703 template <typename Dispatch> 11704 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11705 Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 11706 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11707 Dispatch const & d ) const 11708 { 11709 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11710 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11711 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 11712 # endif 11713 11714 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 11715 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11716 d.vkCreateCuModuleNVX( m_device, 11717 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 11718 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11719 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 11720 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); 11721 11722 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); 11723 } 11724 11725 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11726 template <typename Dispatch> 11727 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11728 Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 11729 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11730 Dispatch const & d ) const 11731 { 11732 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11734 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 11735 # endif 11736 11737 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 11738 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11739 d.vkCreateCuModuleNVX( m_device, 11740 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 11741 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11742 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 11743 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); 11744 11745 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11746 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) 
); 11747 } 11748 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11750 11751 template <typename Dispatch> createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,Dispatch const & d) const11752 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, 11753 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11754 VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, 11755 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11756 { 11757 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11758 return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, 11759 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), 11760 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11761 reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); 11762 } 11763 11764 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11765 template <typename Dispatch> 11766 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11767 Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 11768 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11769 Dispatch const & d ) const 11770 { 11771 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11772 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11773 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11774 # endif 11775 11776 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 11777 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11778 d.vkCreateCuFunctionNVX( m_device, 11779 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 11780 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11781 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 11782 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); 11783 11784 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); 11785 } 11786 11787 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11788 template <typename Dispatch> 11789 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11790 Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 11791 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11792 Dispatch const & d ) const 11793 { 11794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11796 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11797 # endif 11798 11799 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 11800 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11801 d.vkCreateCuFunctionNVX( m_device, 11802 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 11803 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11804 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 11805 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); 11806 11807 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11808 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11809 } 11810 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11811 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11812 11813 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11814 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11815 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11816 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11817 { 11818 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11819 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11820 } 11821 11822 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11823 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11824 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11825 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11827 { 11828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11829 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11830 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 11831 # endif 11832 11833 d.vkDestroyCuModuleNVX( m_device, 11834 static_cast<VkCuModuleNVX>( module ), 11835 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11836 } 11837 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11838 11839 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11840 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11841 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11843 { 11844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11845 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11846 } 11847 11848 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11849 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11850 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11851 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11852 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11853 { 11854 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11855 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11856 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 11857 # endif 11858 11859 d.vkDestroyCuModuleNVX( m_device, 11860 static_cast<VkCuModuleNVX>( module ), 11861 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11862 } 11863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11864 11865 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11866 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( 
VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11867 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11869 { 11870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11871 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11872 } 11873 11874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11875 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11876 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11877 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11879 { 11880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11882 VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11883 # endif 11884 11885 d.vkDestroyCuFunctionNVX( m_device, 11886 static_cast<VkCuFunctionNVX>( function ), 11887 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11888 } 11889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11890 11891 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11892 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11893 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11895 { 11896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11897 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( 
function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11898 } 11899 11900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11901 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11902 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11903 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11905 { 11906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11908 VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11909 # endif 11910 11911 d.vkDestroyCuFunctionNVX( m_device, 11912 static_cast<VkCuFunctionNVX>( function ), 11913 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11914 } 11915 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11916 11917 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,Dispatch const & d) const11918 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, 11919 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11920 { 11921 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11922 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); 11923 } 11924 11925 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11926 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,Dispatch const & d) const11927 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, 11928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11929 { 11930 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11931 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11932 VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" ); 11933 # endif 11934 11935 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); 11936 } 11937 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11938 11939 //=== VK_NVX_image_view_handle === 11940 11941 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,Dispatch const & d) const11942 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, 11943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11944 { 11945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11946 return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); 11947 } 11948 11949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11950 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,Dispatch const & d) const11951 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, 11952 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11953 { 11954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11955 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11956 VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" ); 11957 # endif 11958 11959 uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); 11960 11961 return result; 11962 } 11963 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11964 11965 template <typename Dispatch> getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView 
imageView,VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,Dispatch const & d) const11966 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, 11967 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, 11968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11969 { 11970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11971 return static_cast<Result>( 11972 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); 11973 } 11974 11975 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11976 template <typename Dispatch> 11977 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,Dispatch const & d) const11978 Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const 11979 { 11980 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11981 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11982 VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" ); 11983 # endif 11984 11985 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; 11986 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11987 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) ); 11988 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); 11989 11990 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 11991 } 11992 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11993 11994 //=== VK_AMD_draw_indirect_count === 11995 11996 
template <typename Dispatch> drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const11997 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 11998 VULKAN_HPP_NAMESPACE::DeviceSize offset, 11999 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 12000 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 12001 uint32_t maxDrawCount, 12002 uint32_t stride, 12003 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12004 { 12005 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12006 d.vkCmdDrawIndirectCountAMD( m_commandBuffer, 12007 static_cast<VkBuffer>( buffer ), 12008 static_cast<VkDeviceSize>( offset ), 12009 static_cast<VkBuffer>( countBuffer ), 12010 static_cast<VkDeviceSize>( countBufferOffset ), 12011 maxDrawCount, 12012 stride ); 12013 } 12014 12015 template <typename Dispatch> drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const12016 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 12017 VULKAN_HPP_NAMESPACE::DeviceSize offset, 12018 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 12019 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 12020 uint32_t maxDrawCount, 12021 uint32_t stride, 12022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12023 { 12024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12025 d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, 12026 static_cast<VkBuffer>( buffer ), 12027 static_cast<VkDeviceSize>( offset ), 12028 static_cast<VkBuffer>( countBuffer ), 12029 static_cast<VkDeviceSize>( countBufferOffset ), 12030 
maxDrawCount, 12031 stride ); 12032 } 12033 12034 //=== VK_AMD_shader_info === 12035 12036 template <typename Dispatch> getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,size_t * pInfoSize,void * pInfo,Dispatch const & d) const12037 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12038 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12039 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12040 size_t * pInfoSize, 12041 void * pInfo, 12042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12043 { 12044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12045 return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, 12046 static_cast<VkPipeline>( pipeline ), 12047 static_cast<VkShaderStageFlagBits>( shaderStage ), 12048 static_cast<VkShaderInfoTypeAMD>( infoType ), 12049 pInfoSize, 12050 pInfo ) ); 12051 } 12052 12053 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12054 template <typename Uint8_tAllocator, typename Dispatch> 12055 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Dispatch const & d) const12056 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12057 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12058 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12059 Dispatch const & d ) const 12060 { 12061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12063 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 12064 # endif 12065 12066 std::vector<uint8_t, Uint8_tAllocator> info; 12067 size_t infoSize; 12068 
VULKAN_HPP_NAMESPACE::Result result; 12069 do 12070 { 12071 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12072 static_cast<VkPipeline>( pipeline ), 12073 static_cast<VkShaderStageFlagBits>( shaderStage ), 12074 static_cast<VkShaderInfoTypeAMD>( infoType ), 12075 &infoSize, 12076 nullptr ) ); 12077 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 12078 { 12079 info.resize( infoSize ); 12080 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12081 static_cast<VkPipeline>( pipeline ), 12082 static_cast<VkShaderStageFlagBits>( shaderStage ), 12083 static_cast<VkShaderInfoTypeAMD>( infoType ), 12084 &infoSize, 12085 reinterpret_cast<void *>( info.data() ) ) ); 12086 } 12087 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12088 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 12089 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 12090 if ( infoSize < info.size() ) 12091 { 12092 info.resize( infoSize ); 12093 } 12094 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 12095 } 12096 12097 template <typename Uint8_tAllocator, 12098 typename Dispatch, 12099 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 12100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const12101 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12102 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12103 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12104 Uint8_tAllocator & uint8_tAllocator, 12105 Dispatch const & d ) const 12106 { 12107 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12108 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12109 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 12110 # endif 12111 12112 std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); 12113 size_t infoSize; 12114 VULKAN_HPP_NAMESPACE::Result result; 12115 do 12116 { 12117 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12118 static_cast<VkPipeline>( pipeline ), 12119 static_cast<VkShaderStageFlagBits>( shaderStage ), 12120 static_cast<VkShaderInfoTypeAMD>( infoType ), 12121 &infoSize, 12122 nullptr ) ); 12123 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 12124 { 12125 info.resize( infoSize ); 12126 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12127 static_cast<VkPipeline>( pipeline ), 12128 static_cast<VkShaderStageFlagBits>( shaderStage ), 12129 static_cast<VkShaderInfoTypeAMD>( infoType ), 12130 &infoSize, 12131 reinterpret_cast<void *>( info.data() ) ) ); 12132 } 12133 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12134 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 12135 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 12136 if ( infoSize < info.size() ) 12137 { 12138 info.resize( infoSize ); 12139 } 12140 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 12141 } 12142 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12143 12144 //=== VK_KHR_dynamic_rendering === 12145 12146 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const12147 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 12148 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12149 { 12150 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12151 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 12152 } 12153 12154 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12155 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const12156 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 12157 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12158 { 12159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12161 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 12162 # endif 12163 12164 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 12165 } 12166 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12167 12168 template <typename Dispatch> endRenderingKHR(Dispatch const & d) const12169 VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12170 { 12171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12172 d.vkCmdEndRenderingKHR( m_commandBuffer ); 12173 } 12174 12175 #if defined( VK_USE_PLATFORM_GGP ) 12176 //=== VK_GGP_stream_descriptor_surface === 12177 12178 template <typename Dispatch> 12179 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const12180 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, 12181 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12182 
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 12183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12184 { 12185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12186 return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, 12187 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), 12188 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12189 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 12190 } 12191 12192 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12193 template <typename Dispatch> 12194 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12195 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 12196 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12197 Dispatch const & d ) const 12198 { 12199 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12200 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12201 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 12202 # endif 12203 12204 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12205 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 12206 m_instance, 12207 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 12208 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12209 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12210 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createStreamDescriptorSurfaceGGP" ); 12211 12212 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 12213 } 12214 12215 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12216 template <typename Dispatch> 12217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12218 Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 12219 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12220 Dispatch const & d ) const 12221 { 12222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12223 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12224 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 12225 # endif 12226 12227 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12228 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 12229 m_instance, 12230 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 12231 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12232 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12233 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); 12234 12235 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12236 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 12237 } 12238 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 12239 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12240 #endif /*VK_USE_PLATFORM_GGP*/ 12241 12242 //=== VK_NV_external_memory_capabilities === 12243 12244 template <typename Dispatch> 12245 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,Dispatch const & d) const12246 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 12247 VULKAN_HPP_NAMESPACE::ImageType type, 12248 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 12249 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 12250 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 12251 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 12252 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, 12253 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12254 { 12255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12256 return static_cast<Result>( 12257 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, 12258 static_cast<VkFormat>( format ), 12259 static_cast<VkImageType>( type ), 12260 static_cast<VkImageTiling>( tiling ), 12261 static_cast<VkImageUsageFlags>( usage ), 12262 static_cast<VkImageCreateFlags>( flags ), 12263 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 12264 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) ); 12265 } 12266 12267 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12268 template <typename Dispatch> 12269 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,Dispatch const & d) const12270 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 12271 VULKAN_HPP_NAMESPACE::ImageType type, 12272 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 12273 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 12274 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 12275 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 12276 Dispatch const & d ) const 12277 { 12278 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12279 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12280 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && 12281 "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" ); 12282 # endif 12283 12284 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; 12285 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12286 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, 12287 static_cast<VkFormat>( format ), 12288 static_cast<VkImageType>( type ), 12289 static_cast<VkImageTiling>( tiling ), 12290 static_cast<VkImageUsageFlags>( usage ), 12291 static_cast<VkImageCreateFlags>( flags ), 12292 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 12293 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) ); 12294 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); 12295 12296 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); 12297 } 12298 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12299 12300 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 12301 //=== VK_NV_external_memory_win32 === 12302 12303 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,HANDLE * pHandle,Dispatch const & d) const12304 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 12305 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, 12306 HANDLE * pHandle, 12307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12308 { 12309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12310 return static_cast<Result>( 12311 d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); 12312 } 12313 12314 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12315 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,Dispatch const & d) const12316 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( 12317 VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const 12318 { 12319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12320 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12321 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" ); 12322 # endif 12323 12324 HANDLE handle; 12325 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12326 d.vkGetMemoryWin32HandleNV( m_device, 
static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) ); 12327 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); 12328 12329 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 12330 } 12331 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12332 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 12333 12334 //=== VK_KHR_get_physical_device_properties2 === 12335 12336 template <typename Dispatch> getFeatures2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const12337 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, 12338 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12339 { 12340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12341 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 12342 } 12343 12344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12345 template <typename Dispatch> 12346 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const & d) const12347 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12348 { 12349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12350 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12351 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 12352 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12353 # endif 12354 12355 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 12356 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 12357 12358 return features; 12359 } 12360 12361 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12362 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const & d) const12363 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12364 { 12365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12367 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 12368 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12369 # endif 12370 12371 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12372 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 12373 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 12374 12375 return structureChain; 12376 } 12377 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12378 12379 template <typename Dispatch> getProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const12380 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 12381 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12382 { 12383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12384 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 12385 } 12386 12387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12388 template <typename Dispatch> 12389 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const & d) const12390 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12391 { 12392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12393 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 
) 12394 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 12395 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12396 # endif 12397 12398 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 12399 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 12400 12401 return properties; 12402 } 12403 12404 template <typename X, typename Y, typename... Z, typename Dispatch> 12405 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const & d) const12406 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12407 { 12408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12409 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12410 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 12411 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12412 # endif 12413 12414 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12415 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 12416 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 12417 12418 return structureChain; 12419 } 12420 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12421 12422 template <typename Dispatch> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const12423 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 12424 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 12425 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12426 { 12427 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12428 d.vkGetPhysicalDeviceFormatProperties2KHR( 12429 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 12430 } 12431 12432 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12433 template <typename Dispatch> 12434 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const12435 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12436 { 12437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12438 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12439 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 12440 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12441 # endif 12442 12443 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 12444 d.vkGetPhysicalDeviceFormatProperties2KHR( 12445 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 12446 12447 return formatProperties; 12448 } 12449 12450 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12451 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const12452 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12453 { 12454 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12455 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12456 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 12457 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12458 # endif 12459 12460 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12461 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 12462 d.vkGetPhysicalDeviceFormatProperties2KHR( 12463 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 12464 12465 return structureChain; 12466 } 12467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12468 12469 template <typename Dispatch> 12470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const12471 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 12472 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 12473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12474 { 12475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12476 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 12477 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 12478 
reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 12479 } 12480 12481 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12482 template <typename Dispatch> 12483 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const12484 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 12485 { 12486 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12487 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12488 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 12489 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12490 # endif 12491 12492 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 12493 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12494 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 12495 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 12496 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 12497 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 12498 12499 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 12500 } 12501 12502 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const12504 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 12505 { 12506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12507 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12508 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 12509 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12510 # endif 12511 12512 StructureChain<X, Y, Z...> structureChain; 12513 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 12514 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12515 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 12516 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 12517 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 12518 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 12519 12520 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 12521 } 12522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12523 12524 template <typename Dispatch> getQueueFamilyProperties2KHR(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const12525 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, 12526 
VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 12527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12528 { 12529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12530 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12531 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 12532 } 12533 12534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12535 template <typename QueueFamilyProperties2Allocator, typename Dispatch> 12536 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12537 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12538 { 12539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12540 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12541 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12542 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12543 # endif 12544 12545 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 12546 uint32_t queueFamilyPropertyCount; 12547 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12548 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12549 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12550 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12551 12552 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12553 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12554 { 12555 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12556 } 12557 return queueFamilyProperties; 12558 } 12559 12560 template < 12561 typename 
QueueFamilyProperties2Allocator, 12562 typename Dispatch, 12563 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 12564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const12565 PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 12566 { 12567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12568 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12569 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12570 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12571 # endif 12572 12573 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 12574 uint32_t queueFamilyPropertyCount; 12575 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12576 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12577 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12578 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12579 12580 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12581 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12582 { 12583 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12584 } 12585 return queueFamilyProperties; 12586 } 12587 12588 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 12589 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12590 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12591 { 12592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12593 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12594 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12595 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12596 # endif 12597 12598 std::vector<StructureChain, StructureChainAllocator> structureChains; 12599 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 12600 uint32_t queueFamilyPropertyCount; 12601 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12602 structureChains.resize( queueFamilyPropertyCount ); 12603 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12604 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12605 { 12606 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 12607 } 12608 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12609 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12610 12611 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12612 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12613 { 12614 structureChains.resize( queueFamilyPropertyCount ); 12615 } 12616 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12617 { 12618 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 12619 } 12620 return structureChains; 12621 } 12622 12623 template <typename StructureChain, 12624 typename StructureChainAllocator, 12625 typename Dispatch, 12626 typename 
std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 12627 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(StructureChainAllocator & structureChainAllocator,Dispatch const & d) const12628 PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 12629 { 12630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12631 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12632 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12633 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12634 # endif 12635 12636 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 12637 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 12638 uint32_t queueFamilyPropertyCount; 12639 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12640 structureChains.resize( queueFamilyPropertyCount ); 12641 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12642 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12643 { 12644 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 12645 } 12646 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12647 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12648 12649 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12650 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12651 { 12652 structureChains.resize( queueFamilyPropertyCount ); 12653 } 12654 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12655 { 12656 
structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 12657 } 12658 return structureChains; 12659 } 12660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12661 12662 template <typename Dispatch> getMemoryProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const12663 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 12664 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12665 { 12666 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12667 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 12668 } 12669 12670 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12671 template <typename Dispatch> 12672 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const & d) const12673 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12674 { 12675 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12676 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12677 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 12678 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12679 # endif 12680 12681 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 12682 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 12683 12684 return memoryProperties; 12685 } 12686 12687 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12688 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const & d) const12689 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12690 { 12691 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12692 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12693 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 12694 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12695 # endif 12696 12697 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12698 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 12699 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 12700 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 12701 12702 return structureChain; 12703 } 12704 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12705 12706 template <typename Dispatch> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const12707 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 12708 uint32_t * pPropertyCount, 12709 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 12710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12711 { 12712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12713 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12714 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 12715 pPropertyCount, 12716 
reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 12717 } 12718 12719 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12720 template <typename SparseImageFormatProperties2Allocator, typename Dispatch> 12721 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const12722 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 12723 Dispatch const & d ) const 12724 { 12725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12726 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12727 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 12728 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12729 # endif 12730 12731 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 12732 uint32_t propertyCount; 12733 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 12734 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 12735 properties.resize( propertyCount ); 12736 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12737 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 12738 &propertyCount, 12739 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 12740 12741 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 12742 if ( propertyCount < properties.size() ) 12743 { 12744 properties.resize( propertyCount ); 12745 } 12746 return properties; 12747 } 12748 12749 template < 12750 typename SparseImageFormatProperties2Allocator, 12751 typename 
Dispatch, 12752 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 12753 int>::type> 12754 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const12755 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 12756 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 12757 Dispatch const & d ) const 12758 { 12759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12761 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 12762 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12763 # endif 12764 12765 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 12766 uint32_t propertyCount; 12767 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 12768 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 12769 properties.resize( propertyCount ); 12770 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12771 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 12772 &propertyCount, 12773 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 12774 12775 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 12776 if ( propertyCount < properties.size() ) 
12777 { 12778 properties.resize( propertyCount ); 12779 } 12780 return properties; 12781 } 12782 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12783 12784 //=== VK_KHR_device_group === 12785 12786 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const12787 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, 12788 uint32_t localDeviceIndex, 12789 uint32_t remoteDeviceIndex, 12790 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 12791 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12792 { 12793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12794 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 12795 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 12796 } 12797 12798 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12799 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const12800 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( 12801 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12802 { 12803 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12804 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12805 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && 12806 "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); 12807 # endif 12808 12809 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 12810 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 12811 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, 
reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); 12812 12813 return peerMemoryFeatures; 12814 } 12815 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12816 12817 template <typename Dispatch> setDeviceMaskKHR(uint32_t deviceMask,Dispatch const & d) const12818 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12819 { 12820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12821 d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); 12822 } 12823 12824 template <typename Dispatch> dispatchBaseKHR(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const12825 VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, 12826 uint32_t baseGroupY, 12827 uint32_t baseGroupZ, 12828 uint32_t groupCountX, 12829 uint32_t groupCountY, 12830 uint32_t groupCountZ, 12831 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12832 { 12833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12834 d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 12835 } 12836 12837 #if defined( VK_USE_PLATFORM_VI_NN ) 12838 //=== VK_NN_vi_surface === 12839 12840 template <typename Dispatch> createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const12841 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, 12842 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12843 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 12844 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12845 { 12846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12847 return 
static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, 12848 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), 12849 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12850 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 12851 } 12852 12853 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12854 template <typename Dispatch> 12855 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12856 Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 12857 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12858 Dispatch const & d ) const 12859 { 12860 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12861 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12862 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 12863 # endif 12864 12865 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12866 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12867 d.vkCreateViSurfaceNN( m_instance, 12868 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 12869 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12870 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12871 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); 12872 12873 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 12874 } 12875 12876 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12877 template <typename Dispatch> 12878 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type 
createViSurfaceNNUnique(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12879 Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 12880 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12881 Dispatch const & d ) const 12882 { 12883 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12884 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12885 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 12886 # endif 12887 12888 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12889 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12890 d.vkCreateViSurfaceNN( m_instance, 12891 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 12892 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12893 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12894 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); 12895 12896 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12897 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 12898 } 12899 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12900 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12901 #endif /*VK_USE_PLATFORM_VI_NN*/ 12902 12903 //=== VK_KHR_maintenance1 === 12904 12905 template <typename Dispatch> trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const12906 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 12907 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 12908 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 12909 { 12910 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12911 d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 12912 } 12913 12914 //=== VK_KHR_device_group_creation === 12915 12916 template <typename Dispatch> 12917 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroupsKHR(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const12918 Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, 12919 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 12920 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12921 { 12922 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12923 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12924 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 12925 } 12926 12927 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12928 template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 12929 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12930 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const & d) const12931 Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const 12932 { 12933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12934 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12935 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 12936 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 12937 # endif 12938 12939 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, 
PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 12940 uint32_t physicalDeviceGroupCount; 12941 VULKAN_HPP_NAMESPACE::Result result; 12942 do 12943 { 12944 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 12945 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 12946 { 12947 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12948 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12949 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 12950 } 12951 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12952 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 12953 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 12954 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 12955 { 12956 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12957 } 12958 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 12959 } 12960 12961 template <typename PhysicalDeviceGroupPropertiesAllocator, 12962 typename Dispatch, 12963 typename std::enable_if< 12964 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 12965 int>::type> 12966 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12967 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const12968 Instance::enumeratePhysicalDeviceGroupsKHR( 
PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 12969 { 12970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12971 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12972 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 12973 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 12974 # endif 12975 12976 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 12977 physicalDeviceGroupPropertiesAllocator ); 12978 uint32_t physicalDeviceGroupCount; 12979 VULKAN_HPP_NAMESPACE::Result result; 12980 do 12981 { 12982 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 12983 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 12984 { 12985 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12986 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12987 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 12988 } 12989 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12990 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 12991 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 12992 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 12993 { 12994 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12995 } 12996 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 12997 } 12998 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12999 13000 //=== 
VK_KHR_external_memory_capabilities === 13001 13002 template <typename Dispatch> getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const13003 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 13004 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 13005 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13006 { 13007 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13008 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 13009 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 13010 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 13011 } 13012 13013 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13014 template <typename Dispatch> 13015 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const13016 PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 13017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13018 { 13019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13020 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13021 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && 13022 "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 13023 # endif 13024 13025 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 13026 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 13027 reinterpret_cast<const 
VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 13028 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 13029 13030 return externalBufferProperties; 13031 } 13032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13033 13034 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 13035 //=== VK_KHR_external_memory_win32 === 13036 13037 template <typename Dispatch> getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const13038 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, 13039 HANDLE * pHandle, 13040 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13041 { 13042 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13043 return static_cast<Result>( 13044 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 13045 } 13046 13047 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13048 template <typename Dispatch> 13049 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const13050 Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 13051 { 13052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13053 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13054 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" ); 13055 # endif 13056 13057 HANDLE handle; 13058 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13059 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( 
&getWin32HandleInfo ), &handle ) ); 13060 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); 13061 13062 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 13063 } 13064 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13065 13066 template <typename Dispatch> 13067 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,Dispatch const & d) const13068 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 13069 HANDLE handle, 13070 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, 13071 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13072 { 13073 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13074 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 13075 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 13076 handle, 13077 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); 13078 } 13079 13080 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13081 template <typename Dispatch> 13082 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,Dispatch const & d) const13083 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const 13084 { 13085 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13086 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13087 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function 
<vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" ); 13088 # endif 13089 13090 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; 13091 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13092 d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 13093 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 13094 handle, 13095 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) ); 13096 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); 13097 13098 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); 13099 } 13100 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13101 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 13102 13103 //=== VK_KHR_external_memory_fd === 13104 13105 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const13106 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, 13107 int * pFd, 13108 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13109 { 13110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13111 return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 13112 } 13113 13114 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13115 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,Dispatch const & d) const13116 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, 13117 Dispatch const & d ) const 13118 { 13119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13120 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13121 VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" ); 13122 # endif 13123 13124 int fd; 13125 VULKAN_HPP_NAMESPACE::Result result = 13126 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 13127 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); 13128 13129 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 13130 } 13131 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13132 13133 template <typename Dispatch> getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,Dispatch const & d) const13134 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 13135 int fd, 13136 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, 13137 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13138 { 13139 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13140 return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( 13141 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); 13142 } 13143 13144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13145 template <typename Dispatch> 13146 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,Dispatch const & d) const13147 Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const 13148 { 13149 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13150 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13151 VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" ); 13152 # endif 13153 13154 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; 13155 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR( 13156 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); 13157 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); 13158 13159 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); 13160 } 13161 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13162 13163 //=== VK_KHR_external_semaphore_capabilities === 13164 13165 template <typename Dispatch> 13166 VULKAN_HPP_INLINE void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const13167 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 13168 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 13169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13170 { 13171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13172 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 13173 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 13174 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 13175 } 13176 13177 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13178 template <typename Dispatch> 13179 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const13180 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 13181 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13182 { 13183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13184 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13185 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && 13186 "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 13187 # endif 13188 13189 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 13190 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 13191 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 13192 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 13193 13194 return externalSemaphoreProperties; 13195 } 13196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13197 13198 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 13199 //=== VK_KHR_external_semaphore_win32 === 13200 13201 template <typename Dispatch> importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,Dispatch const & d) const13202 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( 13203 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13204 { 13205 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13206 return static_cast<Result>( 13207 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const 
VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); 13208 } 13209 13210 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13211 template <typename Dispatch> 13212 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,Dispatch const & d) const13213 Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 13214 Dispatch const & d ) const 13215 { 13216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13218 VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 13219 # endif 13220 13221 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13222 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) ); 13223 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); 13224 13225 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13226 } 13227 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13228 13229 template <typename Dispatch> getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const13230 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( 13231 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13232 { 13233 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13234 return static_cast<Result>( 13235 
d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 13236 } 13237 13238 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13239 template <typename Dispatch> 13240 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const13241 Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 13242 { 13243 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13244 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13245 VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 13246 # endif 13247 13248 HANDLE handle; 13249 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13250 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 13251 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); 13252 13253 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 13254 } 13255 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13256 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 13257 13258 //=== VK_KHR_external_semaphore_fd === 13259 13260 template <typename Dispatch> importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,Dispatch const & d) const13261 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, 13262 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13263 { 13264 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
13265 return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); 13266 } 13267 13268 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13269 template <typename Dispatch> 13270 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,Dispatch const & d) const13271 Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const 13272 { 13273 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13274 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13275 VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 13276 # endif 13277 13278 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13279 d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); 13280 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); 13281 13282 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13283 } 13284 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13285 13286 template <typename Dispatch> getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const13287 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, 13288 int * pFd, 13289 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13290 { 13291 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13292 return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 13293 } 13294 13295 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 13296 template <typename Dispatch> 13297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,Dispatch const & d) const13298 Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const 13299 { 13300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13301 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13302 VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 13303 # endif 13304 13305 int fd; 13306 VULKAN_HPP_NAMESPACE::Result result = 13307 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 13308 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); 13309 13310 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 13311 } 13312 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13313 13314 //=== VK_KHR_push_descriptor === 13315 13316 template <typename Dispatch> pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,Dispatch const & d) const13317 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 13318 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13319 uint32_t set, 13320 uint32_t descriptorWriteCount, 13321 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 13322 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13323 { 13324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13325 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 13326 
static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 13327 static_cast<VkPipelineLayout>( layout ), 13328 set, 13329 descriptorWriteCount, 13330 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); 13331 } 13332 13333 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13334 template <typename Dispatch> 13335 VULKAN_HPP_INLINE void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,Dispatch const & d) const13336 CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 13337 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13338 uint32_t set, 13339 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 13340 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13341 { 13342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13343 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13344 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 13345 # endif 13346 13347 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 13348 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 13349 static_cast<VkPipelineLayout>( layout ), 13350 set, 13351 descriptorWrites.size(), 13352 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); 13353 } 13354 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13355 13356 template <typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,const void * pData,Dispatch const & d) const13357 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 
13358 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13359 uint32_t set, 13360 const void * pData, 13361 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13362 { 13363 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13364 d.vkCmdPushDescriptorSetWithTemplateKHR( 13365 m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData ); 13366 } 13367 13368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13369 template <typename DataType, typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,DataType const & data,Dispatch const & d) const13370 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13371 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13372 uint32_t set, 13373 DataType const & data, 13374 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13375 { 13376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13378 VULKAN_HPP_ASSERT( 13379 d.vkCmdPushDescriptorSetWithTemplateKHR && 13380 "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 13381 # endif 13382 13383 d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, 13384 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13385 static_cast<VkPipelineLayout>( layout ), 13386 set, 13387 reinterpret_cast<const void *>( &data ) ); 13388 } 13389 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13390 13391 //=== VK_EXT_conditional_rendering === 13392 13393 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,Dispatch const & d) const13394 VULKAN_HPP_INLINE 
void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, 13395 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13396 { 13397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13398 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); 13399 } 13400 13401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13402 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,Dispatch const & d) const13403 VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, 13404 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13405 { 13406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13407 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13408 VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" ); 13409 # endif 13410 13411 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); 13412 } 13413 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13414 13415 template <typename Dispatch> endConditionalRenderingEXT(Dispatch const & d) const13416 VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13417 { 13418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13419 d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); 13420 } 13421 13422 //=== VK_KHR_descriptor_update_template === 13423 13424 template <typename Dispatch> 13425 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplateKHR(const 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const13426 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 13427 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13428 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 13429 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13430 { 13431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13432 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, 13433 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 13434 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13435 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 13436 } 13437 13438 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13439 template <typename Dispatch> 13440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13441 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 13442 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13443 Dispatch const & d ) const 13444 { 13445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13447 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 13448 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13449 # endif 13450 13451 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 13452 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 13453 m_device, 13454 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 13455 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13456 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 13457 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); 13458 13459 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); 13460 } 13461 13462 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13463 template <typename Dispatch> 13464 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13465 Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 13466 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13467 Dispatch const & d ) const 13468 { 13469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13470 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13471 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 13472 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13473 # endif 13474 13475 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 13476 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateDescriptorUpdateTemplateKHR( 13477 m_device, 13478 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 13479 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13480 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 13481 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); 13482 13483 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 13484 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 13485 descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13486 } 13487 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13488 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13489 13490 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const13491 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13492 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13493 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13494 { 13495 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13496 d.vkDestroyDescriptorUpdateTemplateKHR( 13497 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 13498 } 13499 13500 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13501 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13502 VULKAN_HPP_INLINE void 
Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13503 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13504 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13505 { 13506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13507 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13508 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && 13509 "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13510 # endif 13511 13512 d.vkDestroyDescriptorUpdateTemplateKHR( 13513 m_device, 13514 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13515 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 13516 } 13517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13518 13519 template <typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const13520 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 13521 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13522 const void * pData, 13523 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13524 { 13525 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13526 d.vkUpdateDescriptorSetWithTemplateKHR( 13527 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 13528 } 13529 13530 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13531 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType 
const & data,Dispatch const & d) const13532 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 13533 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13534 DataType const & data, 13535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13536 { 13537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13539 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && 13540 "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13541 # endif 13542 13543 d.vkUpdateDescriptorSetWithTemplateKHR( m_device, 13544 static_cast<VkDescriptorSet>( descriptorSet ), 13545 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13546 reinterpret_cast<const void *>( &data ) ); 13547 } 13548 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13549 13550 //=== VK_NV_clip_space_w_scaling === 13551 13552 template <typename Dispatch> setViewportWScalingNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,Dispatch const & d) const13553 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13554 uint32_t viewportCount, 13555 const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, 13556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13557 { 13558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13559 d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); 13560 } 13561 13562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13563 template <typename Dispatch> 13564 VULKAN_HPP_INLINE void setViewportWScalingNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,Dispatch const & d) 
const13565 CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13566 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, 13567 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13568 { 13569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13571 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" ); 13572 # endif 13573 13574 d.vkCmdSetViewportWScalingNV( 13575 m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); 13576 } 13577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13578 13579 //=== VK_EXT_direct_mode_display === 13580 13581 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 13582 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13583 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13584 { 13585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13586 return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 13587 } 13588 #else 13589 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13590 VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13591 { 13592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13593 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13594 VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function <vkReleaseDisplayEXT> requires <VK_EXT_direct_mode_display>" ); 13595 # endif 13596 13597 d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) 
); 13598 } 13599 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 13600 13601 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) 13602 //=== VK_EXT_acquire_xlib_display === 13603 13604 template <typename Dispatch> acquireXlibDisplayEXT(Display * dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, 13606 VULKAN_HPP_NAMESPACE::DisplayKHR display, 13607 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13608 { 13609 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13610 return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) ); 13611 } 13612 13613 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13614 template <typename Dispatch> 13615 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireXlibDisplayEXT(Display & dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13616 PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 13617 { 13618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13619 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13620 VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13621 # endif 13622 13623 VULKAN_HPP_NAMESPACE::Result result = 13624 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); 13625 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); 13626 13627 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13628 } 13629 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13630 13631 template <typename Dispatch> getRandROutputDisplayEXT(Display * dpy,RROutput 
rrOutput,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const13632 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, 13633 RROutput rrOutput, 13634 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 13635 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13636 { 13637 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13638 return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 13639 } 13640 13641 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13642 template <typename Dispatch> 13643 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(Display & dpy,RROutput rrOutput,Dispatch const & d) const13644 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 13645 { 13646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13647 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13648 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13649 # endif 13650 13651 VULKAN_HPP_NAMESPACE::DisplayKHR display; 13652 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13653 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 13654 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); 13655 13656 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 13657 } 13658 13659 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13660 template <typename Dispatch> 13661 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique(Display & 
dpy,RROutput rrOutput,Dispatch const & d) const13662 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 13663 { 13664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13665 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13666 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13667 # endif 13668 13669 VULKAN_HPP_NAMESPACE::DisplayKHR display; 13670 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13671 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 13672 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); 13673 13674 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13675 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 13676 } 13677 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13678 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13679 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ 13680 13681 //=== VK_EXT_display_surface_counter === 13682 13683 template <typename Dispatch> 13684 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,Dispatch const & d) const13685 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 13686 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, 13687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13688 { 13689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13690 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 13691 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), 
reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); 13692 } 13693 13694 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13695 template <typename Dispatch> 13696 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const13697 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 13698 { 13699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13700 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13701 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && 13702 "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" ); 13703 # endif 13704 13705 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; 13706 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 13707 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); 13708 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); 13709 13710 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 13711 } 13712 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13713 13714 //=== VK_EXT_display_control === 13715 13716 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,Dispatch const & d) const13717 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13718 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, 13719 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13720 { 
13721 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13722 return static_cast<Result>( 13723 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); 13724 } 13725 13726 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13727 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,Dispatch const & d) const13728 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13729 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, 13730 Dispatch const & d ) const 13731 { 13732 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13734 VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" ); 13735 # endif 13736 13737 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13738 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); 13739 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); 13740 13741 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13742 } 13743 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13744 13745 template <typename Dispatch> registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const13746 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, 13747 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, 13748 VULKAN_HPP_NAMESPACE::Fence * pFence, 13749 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13750 { 13751 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13752 return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, 13753 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), 13754 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13755 reinterpret_cast<VkFence *>( pFence ) ) ); 13756 } 13757 13758 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13759 template <typename Dispatch> 13760 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13761 Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 13762 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13763 Dispatch const & d ) const 13764 { 13765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13767 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 13768 # endif 13769 13770 VULKAN_HPP_NAMESPACE::Fence fence; 13771 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 13772 m_device, 13773 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 13774 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13775 reinterpret_cast<VkFence *>( &fence ) ) ); 13776 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); 13777 13778 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 13779 } 13780 13781 # ifndef 
VULKAN_HPP_NO_SMART_HANDLE 13782 template <typename Dispatch> 13783 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13784 Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 13785 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13786 Dispatch const & d ) const 13787 { 13788 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13789 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13790 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 13791 # endif 13792 13793 VULKAN_HPP_NAMESPACE::Fence fence; 13794 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 13795 m_device, 13796 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 13797 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13798 reinterpret_cast<VkFence *>( &fence ) ) ); 13799 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); 13800 13801 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13802 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13803 } 13804 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13805 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13806 13807 template <typename Dispatch> registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * 
pFence,Dispatch const & d) const13808 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13809 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, 13810 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13811 VULKAN_HPP_NAMESPACE::Fence * pFence, 13812 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13813 { 13814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13815 return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, 13816 static_cast<VkDisplayKHR>( display ), 13817 reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), 13818 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13819 reinterpret_cast<VkFence *>( pFence ) ) ); 13820 } 13821 13822 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13823 template <typename Dispatch> 13824 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13825 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13826 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 13827 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13828 Dispatch const & d ) const 13829 { 13830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13832 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 13833 # endif 13834 13835 VULKAN_HPP_NAMESPACE::Fence fence; 13836 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 13837 m_device, 13838 static_cast<VkDisplayKHR>( display ), 13839 
reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 13840 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13841 reinterpret_cast<VkFence *>( &fence ) ) ); 13842 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); 13843 13844 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 13845 } 13846 13847 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13848 template <typename Dispatch> 13849 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13850 Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13851 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 13852 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13853 Dispatch const & d ) const 13854 { 13855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13856 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13857 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 13858 # endif 13859 13860 VULKAN_HPP_NAMESPACE::Fence fence; 13861 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 13862 m_device, 13863 static_cast<VkDisplayKHR>( display ), 13864 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 13865 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13866 reinterpret_cast<VkFence *>( &fence ) ) ); 13867 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); 13868 13869 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13870 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13871 } 13872 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13873 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13874 13875 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const13876 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13877 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 13878 uint64_t * pCounterValue, 13879 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13880 { 13881 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13882 return static_cast<Result>( 13883 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); 13884 } 13885 13886 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13887 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const13888 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( 13889 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const 13890 { 13891 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13892 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13893 VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" ); 13894 # endif 13895 13896 
uint64_t counterValue; 13897 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13898 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) ); 13899 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); 13900 13901 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); 13902 } 13903 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13904 13905 //=== VK_GOOGLE_display_timing === 13906 13907 template <typename Dispatch> 13908 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,Dispatch const & d) const13909 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13910 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, 13911 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13912 { 13913 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13914 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( 13915 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); 13916 } 13917 13918 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13919 template <typename Dispatch> 13920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13921 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 13922 { 13923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13924 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13925 VULKAN_HPP_ASSERT( 
d.vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" ); 13926 # endif 13927 13928 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; 13929 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRefreshCycleDurationGOOGLE( 13930 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); 13931 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); 13932 13933 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); 13934 } 13935 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13936 13937 template <typename Dispatch> 13938 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pPresentationTimingCount,VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,Dispatch const & d) const13939 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13940 uint32_t * pPresentationTimingCount, 13941 VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, 13942 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13943 { 13944 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13945 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, 13946 static_cast<VkSwapchainKHR>( swapchain ), 13947 pPresentationTimingCount, 13948 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); 13949 } 13950 13951 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13952 template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch> 13953 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13954 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, 
PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13955 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 13956 { 13957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13958 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13959 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 13960 # endif 13961 13962 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; 13963 uint32_t presentationTimingCount; 13964 VULKAN_HPP_NAMESPACE::Result result; 13965 do 13966 { 13967 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13968 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 13969 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 13970 { 13971 presentationTimings.resize( presentationTimingCount ); 13972 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13973 d.vkGetPastPresentationTimingGOOGLE( m_device, 13974 static_cast<VkSwapchainKHR>( swapchain ), 13975 &presentationTimingCount, 13976 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 13977 } 13978 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13979 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 13980 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 13981 if ( presentationTimingCount < presentationTimings.size() ) 13982 { 13983 presentationTimings.resize( presentationTimingCount ); 13984 } 13985 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 13986 } 13987 
13988 template < 13989 typename PastPresentationTimingGOOGLEAllocator, 13990 typename Dispatch, 13991 typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, 13992 int>::type> 13993 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13994 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,Dispatch const & d) const13995 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13996 PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, 13997 Dispatch const & d ) const 13998 { 13999 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14000 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14001 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 14002 # endif 14003 14004 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( 14005 pastPresentationTimingGOOGLEAllocator ); 14006 uint32_t presentationTimingCount; 14007 VULKAN_HPP_NAMESPACE::Result result; 14008 do 14009 { 14010 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14011 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 14012 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 14013 { 14014 presentationTimings.resize( presentationTimingCount ); 14015 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14016 d.vkGetPastPresentationTimingGOOGLE( m_device, 14017 static_cast<VkSwapchainKHR>( swapchain ), 14018 &presentationTimingCount, 14019 
reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 14020 } 14021 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14022 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 14023 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 14024 if ( presentationTimingCount < presentationTimings.size() ) 14025 { 14026 presentationTimings.resize( presentationTimingCount ); 14027 } 14028 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 14029 } 14030 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14031 14032 //=== VK_EXT_discard_rectangles === 14033 14034 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,Dispatch const & d) const14035 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 14036 uint32_t discardRectangleCount, 14037 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, 14038 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14039 { 14040 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14041 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); 14042 } 14043 14044 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14045 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,Dispatch const & d) const14046 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 14047 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, 14048 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14049 { 14050 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14051 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14052 VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" ); 14053 # endif 14054 14055 d.vkCmdSetDiscardRectangleEXT( 14056 m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); 14057 } 14058 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14059 14060 template <typename Dispatch> setDiscardRectangleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,Dispatch const & d) const14061 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, 14062 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14063 { 14064 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14065 d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) ); 14066 } 14067 14068 template <typename Dispatch> setDiscardRectangleModeEXT(VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,Dispatch const & d) const14069 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, 14070 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14071 { 14072 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14073 d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) ); 14074 } 14075 14076 //=== VK_EXT_hdr_metadata === 14077 14078 template <typename Dispatch> setHdrMetadataEXT(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,Dispatch const & d) const14079 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, 14080 const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 14081 
const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, 14082 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14083 { 14084 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14085 d.vkSetHdrMetadataEXT( 14086 m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); 14087 } 14088 14089 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14090 template <typename Dispatch> setHdrMetadataEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,Dispatch const & d) const14091 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, 14092 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, 14093 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 14094 { 14095 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14096 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14097 VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" ); 14098 # endif 14099 # ifdef VULKAN_HPP_NO_EXCEPTIONS 14100 VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); 14101 # else 14102 if ( swapchains.size() != metadata.size() ) 14103 { 14104 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); 14105 } 14106 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 14107 14108 d.vkSetHdrMetadataEXT( m_device, 14109 swapchains.size(), 14110 reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), 14111 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); 14112 } 14113 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14114 14115 //=== VK_KHR_create_renderpass2 === 14116 14117 template <typename Dispatch> 
createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const14118 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 14119 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14120 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 14121 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14122 { 14123 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14124 return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, 14125 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 14126 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14127 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 14128 } 14129 14130 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14131 template <typename Dispatch> 14132 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14133 Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 14134 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14135 Dispatch const & d ) const 14136 { 14137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14139 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14140 # endif 14141 14142 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 14143 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14144 d.vkCreateRenderPass2KHR( m_device, 14145 reinterpret_cast<const 
VkRenderPassCreateInfo2 *>( &createInfo ), 14146 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14147 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 14148 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); 14149 14150 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 14151 } 14152 14153 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14154 template <typename Dispatch> 14155 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14156 Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 14157 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14158 Dispatch const & d ) const 14159 { 14160 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14161 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14162 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14163 # endif 14164 14165 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 14166 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14167 d.vkCreateRenderPass2KHR( m_device, 14168 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 14169 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14170 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 14171 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); 14172 14173 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14174 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 14175 } 14176 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14177 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14178 14179 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const14180 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 14181 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 14182 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14183 { 14184 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14185 d.vkCmdBeginRenderPass2KHR( 14186 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 14187 } 14188 14189 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14190 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const14191 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 14192 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 14193 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14194 { 14195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14196 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14197 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14198 # endif 14199 14200 d.vkCmdBeginRenderPass2KHR( 14201 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( 
&renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 14202 } 14203 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14204 14205 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const14206 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 14207 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 14208 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14209 { 14210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14211 d.vkCmdNextSubpass2KHR( 14212 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 14213 } 14214 14215 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14216 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const14217 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 14218 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 14219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14220 { 14221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14223 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14224 # endif 14225 14226 d.vkCmdNextSubpass2KHR( 14227 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 14228 } 14229 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14230 14231 template <typename Dispatch> endRenderPass2KHR(const 
VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const14232 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 14233 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14234 { 14235 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14236 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 14237 } 14238 14239 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14240 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const14241 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 14242 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14243 { 14244 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14245 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14246 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14247 # endif 14248 14249 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 14250 } 14251 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14252 14253 //=== VK_KHR_shared_presentable_image === 14254 14255 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 14256 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14257 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14258 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14259 { 14260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14261 return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 14262 } 14263 #else 14264 template <typename Dispatch> 
getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14265 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14266 Dispatch const & d ) const 14267 { 14268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14270 VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" ); 14271 # endif 14272 14273 VULKAN_HPP_NAMESPACE::Result result = 14274 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 14275 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 14276 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", 14277 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 14278 14279 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14280 } 14281 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 14282 14283 //=== VK_KHR_external_fence_capabilities === 14284 14285 template <typename Dispatch> getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const14286 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 14287 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 14288 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14289 { 14290 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14291 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 14292 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 14293 reinterpret_cast<VkExternalFenceProperties *>( 
pExternalFenceProperties ) ); 14294 } 14295 14296 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14297 template <typename Dispatch> 14298 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const14299 PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 14300 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14301 { 14302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14303 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14304 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR && 14305 "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 14306 # endif 14307 14308 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 14309 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 14310 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 14311 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 14312 14313 return externalFenceProperties; 14314 } 14315 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14316 14317 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 14318 //=== VK_KHR_external_fence_win32 === 14319 14320 template <typename Dispatch> importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,Dispatch const & d) const14321 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( 14322 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14323 { 14324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14325 return static_cast<Result>( 14326 d.vkImportFenceWin32HandleKHR( m_device, 
reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); 14327 } 14328 14329 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14330 template <typename Dispatch> 14331 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,Dispatch const & d) const14332 Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const 14333 { 14334 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14335 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14336 VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 14337 # endif 14338 14339 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14340 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); 14341 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); 14342 14343 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14344 } 14345 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14346 14347 template <typename Dispatch> getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const14348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, 14349 HANDLE * pHandle, 14350 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14351 { 14352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14353 return static_cast<Result>( 14354 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const 
VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 14355 } 14356 14357 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14358 template <typename Dispatch> 14359 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const14360 Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 14361 { 14362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14363 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14364 VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 14365 # endif 14366 14367 HANDLE handle; 14368 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14369 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 14370 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); 14371 14372 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 14373 } 14374 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14375 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 14376 14377 //=== VK_KHR_external_fence_fd === 14378 14379 template <typename Dispatch> importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,Dispatch const & d) const14380 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, 14381 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14382 { 14383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14384 return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) 
) ); 14385 } 14386 14387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14388 template <typename Dispatch> 14389 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo,Dispatch const & d) const14390 Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const 14391 { 14392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14393 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14394 VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); 14395 # endif 14396 14397 VULKAN_HPP_NAMESPACE::Result result = 14398 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) ); 14399 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); 14400 14401 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14402 } 14403 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14404 14405 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const14406 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, 14407 int * pFd, 14408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14409 { 14410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14411 return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 14412 } 14413 14414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14415 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,Dispatch const & d) const14416 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, 14417 Dispatch const & d ) const 14418 { 14419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14420 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14421 VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); 14422 # endif 14423 14424 int fd; 14425 VULKAN_HPP_NAMESPACE::Result result = 14426 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 14427 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); 14428 14429 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 14430 } 14431 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14432 14433 //=== VK_KHR_performance_query === 14434 14435 template <typename Dispatch> 14436 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,uint32_t * pCounterCount,VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,Dispatch const & d) const14437 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 14438 uint32_t * pCounterCount, 14439 VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, 14440 VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, 14441 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14442 { 14443 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14444 return static_cast<Result>( 14445 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, 14446 queueFamilyIndex, 14447 pCounterCount, 14448 reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), 14449 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( 
pCounterDescriptions ) ) ); 14450 } 14451 14452 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14453 template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch> 14454 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14455 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 14456 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,Dispatch const & d) const14457 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const 14458 { 14459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14460 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14461 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && 14462 "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); 14463 # endif 14464 14465 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 14466 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 14467 data_; 14468 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 14469 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 14470 uint32_t counterCount; 14471 VULKAN_HPP_NAMESPACE::Result result; 14472 do 14473 { 14474 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14475 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 14476 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount 
) 14477 { 14478 counters.resize( counterCount ); 14479 counterDescriptions.resize( counterCount ); 14480 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 14481 m_physicalDevice, 14482 queueFamilyIndex, 14483 &counterCount, 14484 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 14485 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 14486 } 14487 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14488 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 14489 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 14490 if ( counterCount < counters.size() ) 14491 { 14492 counters.resize( counterCount ); 14493 counterDescriptions.resize( counterCount ); 14494 } 14495 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 14496 } 14497 14498 template <typename PerformanceCounterKHRAllocator, 14499 typename PerformanceCounterDescriptionKHRAllocator, 14500 typename Dispatch, 14501 typename std::enable_if< 14502 std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value && 14503 std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, 14504 int>::type> 14505 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14506 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 14507 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,PerformanceCounterDescriptionKHRAllocator & 
performanceCounterDescriptionKHRAllocator,Dispatch const & d) const14508 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 14509 PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, 14510 PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, 14511 Dispatch const & d ) const 14512 { 14513 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14514 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14515 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && 14516 "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); 14517 # endif 14518 14519 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 14520 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 14521 data_( 14522 std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); 14523 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 14524 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 14525 uint32_t counterCount; 14526 VULKAN_HPP_NAMESPACE::Result result; 14527 do 14528 { 14529 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14530 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 14531 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) 14532 { 14533 counters.resize( counterCount ); 14534 counterDescriptions.resize( counterCount ); 14535 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 14536 m_physicalDevice, 14537 queueFamilyIndex, 14538 &counterCount, 14539 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 14540 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 14541 } 14542 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14543 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 14544 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 14545 if ( counterCount < counters.size() ) 14546 { 14547 counters.resize( counterCount ); 14548 counterDescriptions.resize( counterCount ); 14549 } 14550 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 14551 } 14552 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14553 14554 template <typename Dispatch> 14555 VULKAN_HPP_INLINE void getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,uint32_t * pNumPasses,Dispatch const & d) const14556 PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, 14557 uint32_t * pNumPasses, 14558 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14559 { 14560 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14561 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 14562 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses ); 14563 } 14564 14565 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14566 template <typename Dispatch> getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,Dispatch const & d) const14567 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t 
PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( 14568 const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14569 { 14570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14571 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14572 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && 14573 "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" ); 14574 # endif 14575 14576 uint32_t numPasses; 14577 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 14578 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses ); 14579 14580 return numPasses; 14581 } 14582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14583 14584 template <typename Dispatch> acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,Dispatch const & d) const14585 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, 14586 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14587 { 14588 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14589 return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); 14590 } 14591 14592 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14593 template <typename Dispatch> 14594 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info,Dispatch const & d) const14595 Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const 14596 { 14597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14598 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14599 VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" ); 14600 # endif 14601 14602 VULKAN_HPP_NAMESPACE::Result result = 14603 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) ); 14604 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); 14605 14606 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14607 } 14608 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14609 14610 template <typename Dispatch> releaseProfilingLockKHR(Dispatch const & d) const14611 VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14612 { 14613 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14614 d.vkReleaseProfilingLockKHR( m_device ); 14615 } 14616 14617 //=== VK_KHR_get_surface_capabilities2 === 14618 14619 template <typename Dispatch> 14620 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,Dispatch const & d) const14621 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 14622 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, 14623 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14624 { 14625 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14626 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 14627 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 14628 reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); 14629 } 14630 14631 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14632 
template <typename Dispatch> 14633 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const14634 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 14635 { 14636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14637 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14638 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && 14639 "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14640 # endif 14641 14642 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; 14643 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14644 d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 14645 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14646 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 14647 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 14648 14649 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 14650 } 14651 14652 template <typename X, typename Y, typename... 
Z, typename Dispatch> 14653 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const14654 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 14655 { 14656 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14657 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14658 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && 14659 "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14660 # endif 14661 14662 StructureChain<X, Y, Z...> structureChain; 14663 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>(); 14664 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14665 d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 14666 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14667 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 14668 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 14669 14670 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 14671 } 14672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14673 14674 template <typename Dispatch> getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,Dispatch const & d) const14675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * 
pSurfaceInfo, 14676 uint32_t * pSurfaceFormatCount, 14677 VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, 14678 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14679 { 14680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14681 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 14682 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 14683 pSurfaceFormatCount, 14684 reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) ); 14685 } 14686 14687 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14688 template <typename SurfaceFormat2KHRAllocator, typename Dispatch> 14689 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const14690 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 14691 { 14692 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14694 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && 14695 "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14696 # endif 14697 14698 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats; 14699 uint32_t surfaceFormatCount; 14700 VULKAN_HPP_NAMESPACE::Result result; 14701 do 14702 { 14703 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 14704 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 14705 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 14706 { 14707 surfaceFormats.resize( surfaceFormatCount ); 14708 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 14709 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 14710 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14711 &surfaceFormatCount, 14712 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 14713 } 14714 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14715 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 14716 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 14717 if ( surfaceFormatCount < surfaceFormats.size() ) 14718 { 14719 surfaceFormats.resize( surfaceFormatCount ); 14720 } 14721 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 14722 } 14723 14724 template <typename SurfaceFormat2KHRAllocator, 14725 typename Dispatch, 14726 typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type> 14727 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,Dispatch const & d) const14728 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 14729 SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, 14730 Dispatch const & d ) const 14731 { 14732 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14734 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && 14735 "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14736 # endif 14737 14738 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, 
SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator ); 14739 uint32_t surfaceFormatCount; 14740 VULKAN_HPP_NAMESPACE::Result result; 14741 do 14742 { 14743 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 14744 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 14745 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 14746 { 14747 surfaceFormats.resize( surfaceFormatCount ); 14748 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14749 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 14750 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14751 &surfaceFormatCount, 14752 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 14753 } 14754 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14755 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 14756 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 14757 if ( surfaceFormatCount < surfaceFormats.size() ) 14758 { 14759 surfaceFormats.resize( surfaceFormatCount ); 14760 } 14761 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 14762 } 14763 14764 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 14765 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const14766 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 14767 { 14768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14769 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 
) 14770 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && 14771 "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14772 # endif 14773 14774 std::vector<StructureChain, StructureChainAllocator> structureChains; 14775 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; 14776 uint32_t surfaceFormatCount; 14777 VULKAN_HPP_NAMESPACE::Result result; 14778 do 14779 { 14780 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 14781 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 14782 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 14783 { 14784 structureChains.resize( surfaceFormatCount ); 14785 surfaceFormats.resize( surfaceFormatCount ); 14786 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 14787 { 14788 surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; 14789 } 14790 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14791 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 14792 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14793 &surfaceFormatCount, 14794 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 14795 } 14796 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14797 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 14798 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 14799 if ( surfaceFormatCount < surfaceFormats.size() ) 14800 { 14801 structureChains.resize( surfaceFormatCount ); 14802 } 14803 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 14804 { 14805 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; 14806 } 14807 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 14808 } 14809 14810 template <typename StructureChain, 14811 typename StructureChainAllocator, 14812 typename Dispatch, 14813 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 14814 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,StructureChainAllocator & structureChainAllocator,Dispatch const & d) const14815 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 14816 StructureChainAllocator & structureChainAllocator, 14817 Dispatch const & d ) const 14818 { 14819 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14820 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14821 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && 14822 "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); 14823 # endif 14824 14825 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 14826 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; 14827 uint32_t surfaceFormatCount; 14828 VULKAN_HPP_NAMESPACE::Result result; 14829 do 14830 { 14831 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 14832 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 14833 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 14834 { 14835 structureChains.resize( surfaceFormatCount ); 14836 surfaceFormats.resize( surfaceFormatCount ); 14837 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 14838 { 14839 surfaceFormats[i].pNext = 
structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; 14840 } 14841 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14842 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 14843 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 14844 &surfaceFormatCount, 14845 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 14846 } 14847 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14848 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 14849 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 14850 if ( surfaceFormatCount < surfaceFormats.size() ) 14851 { 14852 structureChains.resize( surfaceFormatCount ); 14853 } 14854 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 14855 { 14856 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; 14857 } 14858 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 14859 } 14860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14861 14862 //=== VK_KHR_get_display_properties2 === 14863 14864 template <typename Dispatch> getDisplayProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,Dispatch const & d) const14865 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, 14866 VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, 14867 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14868 { 14869 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14870 return static_cast<Result>( 14871 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) ); 14872 } 14873 14874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14875 template <typename DisplayProperties2KHRAllocator, typename 
Dispatch> 14876 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14877 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(Dispatch const & d) const14878 PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const 14879 { 14880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14882 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && 14883 "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 14884 # endif 14885 14886 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties; 14887 uint32_t propertyCount; 14888 VULKAN_HPP_NAMESPACE::Result result; 14889 do 14890 { 14891 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 14892 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 14893 { 14894 properties.resize( propertyCount ); 14895 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14896 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 14897 } 14898 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14899 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 14900 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 14901 if ( propertyCount < properties.size() ) 14902 { 14903 properties.resize( propertyCount ); 14904 } 14905 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 14906 } 14907 14908 template < 14909 typename DisplayProperties2KHRAllocator, 14910 typename Dispatch, 14911 typename std::enable_if<std::is_same<typename 
DisplayProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type> 14912 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14913 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,Dispatch const & d) const14914 PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const 14915 { 14916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14917 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14918 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && 14919 "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 14920 # endif 14921 14922 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator ); 14923 uint32_t propertyCount; 14924 VULKAN_HPP_NAMESPACE::Result result; 14925 do 14926 { 14927 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 14928 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 14929 { 14930 properties.resize( propertyCount ); 14931 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14932 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 14933 } 14934 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14935 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 14936 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 14937 if ( propertyCount < properties.size() ) 14938 { 14939 properties.resize( propertyCount ); 14940 } 14941 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 14942 } 14943 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14944 14945 template <typename Dispatch> getDisplayPlaneProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,Dispatch const & d) const14946 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, 14947 VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, 14948 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14949 { 14950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14951 return static_cast<Result>( 14952 d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) ); 14953 } 14954 14955 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14956 template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch> 14957 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14958 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(Dispatch const & d) const14959 PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const 14960 { 14961 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14962 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14963 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && 14964 "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 14965 # endif 14966 14967 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; 14968 uint32_t propertyCount; 14969 VULKAN_HPP_NAMESPACE::Result result; 14970 do 14971 { 14972 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, 
&propertyCount, nullptr ) ); 14973 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 14974 { 14975 properties.resize( propertyCount ); 14976 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 14977 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 14978 } 14979 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14980 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 14981 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 14982 if ( propertyCount < properties.size() ) 14983 { 14984 properties.resize( propertyCount ); 14985 } 14986 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 14987 } 14988 14989 template < 14990 typename DisplayPlaneProperties2KHRAllocator, 14991 typename Dispatch, 14992 typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, 14993 int>::type> 14994 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14995 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,Dispatch const & d) const14996 PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const 14997 { 14998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14999 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15000 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && 15001 "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15002 # endif 15003 15004 
std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator ); 15005 uint32_t propertyCount; 15006 VULKAN_HPP_NAMESPACE::Result result; 15007 do 15008 { 15009 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 15010 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15011 { 15012 properties.resize( propertyCount ); 15013 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 15014 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 15015 } 15016 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15017 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 15018 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15019 if ( propertyCount < properties.size() ) 15020 { 15021 properties.resize( propertyCount ); 15022 } 15023 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15024 } 15025 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15026 15027 template <typename Dispatch> getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,Dispatch const & d) const15028 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 15029 uint32_t * pPropertyCount, 15030 VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, 15031 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15032 { 15033 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15034 return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( 15035 m_physicalDevice, 
static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) ); 15036 } 15037 15038 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15039 template <typename DisplayModeProperties2KHRAllocator, typename Dispatch> 15040 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 15041 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const15042 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 15043 { 15044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15045 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15046 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15047 # endif 15048 15049 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties; 15050 uint32_t propertyCount; 15051 VULKAN_HPP_NAMESPACE::Result result; 15052 do 15053 { 15054 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15055 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15056 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15057 { 15058 properties.resize( propertyCount ); 15059 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 15060 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15061 } 15062 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15063 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15064 VULKAN_HPP_ASSERT( propertyCount <= 
properties.size() ); 15065 if ( propertyCount < properties.size() ) 15066 { 15067 properties.resize( propertyCount ); 15068 } 15069 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15070 } 15071 15072 template < 15073 typename DisplayModeProperties2KHRAllocator, 15074 typename Dispatch, 15075 typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, 15076 int>::type> 15077 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 15078 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,Dispatch const & d) const15079 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 15080 DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, 15081 Dispatch const & d ) const 15082 { 15083 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15084 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15085 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15086 # endif 15087 15088 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator ); 15089 uint32_t propertyCount; 15090 VULKAN_HPP_NAMESPACE::Result result; 15091 do 15092 { 15093 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15094 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15095 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15096 { 15097 properties.resize( propertyCount ); 15098 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetDisplayModeProperties2KHR( 15099 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15100 } 15101 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15102 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15103 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15104 if ( propertyCount < properties.size() ) 15105 { 15106 properties.resize( propertyCount ); 15107 } 15108 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15109 } 15110 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15111 15112 template <typename Dispatch> 15113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,Dispatch const & d) const15114 PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, 15115 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, 15116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15117 { 15118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15119 return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 15120 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), 15121 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) ); 15122 } 15123 15124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15125 template <typename Dispatch> 15126 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo,Dispatch const & d) const15127 PhysicalDevice::getDisplayPlaneCapabilities2KHR( 
const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const 15128 { 15129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15131 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" ); 15132 # endif 15133 15134 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; 15135 VULKAN_HPP_NAMESPACE::Result result = 15136 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 15137 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), 15138 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) ); 15139 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); 15140 15141 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); 15142 } 15143 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15144 15145 #if defined( VK_USE_PLATFORM_IOS_MVK ) 15146 //=== VK_MVK_ios_surface === 15147 15148 template <typename Dispatch> createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const15149 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, 15150 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15151 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 15152 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15153 { 15154 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15155 return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, 15156 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), 15157 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15158 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 15159 } 15160 15161 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15162 template <typename Dispatch> 15163 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15164 Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 15165 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15166 Dispatch const & d ) const 15167 { 15168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15169 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15170 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 15171 # endif 15172 15173 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15174 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15175 d.vkCreateIOSSurfaceMVK( m_instance, 15176 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 15177 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15178 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15179 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); 15180 15181 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 15182 } 15183 15184 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15185 template <typename Dispatch> 15186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15187 Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 15188 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15189 Dispatch const & d ) const 15190 { 15191 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15192 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15193 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 15194 # endif 15195 15196 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15197 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15198 d.vkCreateIOSSurfaceMVK( m_instance, 15199 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 15200 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15201 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15202 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); 15203 15204 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15205 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 15206 } 15207 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15208 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15209 #endif /*VK_USE_PLATFORM_IOS_MVK*/ 15210 15211 #if defined( VK_USE_PLATFORM_MACOS_MVK ) 15212 //=== VK_MVK_macos_surface === 15213 15214 template <typename Dispatch> createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const15215 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * 
pCreateInfo, 15216 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15217 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 15218 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15219 { 15220 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15221 return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, 15222 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), 15223 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15224 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 15225 } 15226 15227 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15228 template <typename Dispatch> 15229 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15230 Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 15231 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15232 Dispatch const & d ) const 15233 { 15234 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15236 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 15237 # endif 15238 15239 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15240 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15241 d.vkCreateMacOSSurfaceMVK( m_instance, 15242 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 15243 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15244 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15245 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); 15246 
15247 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 15248 } 15249 15250 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15251 template <typename Dispatch> 15252 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15253 Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 15254 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15255 Dispatch const & d ) const 15256 { 15257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15259 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 15260 # endif 15261 15262 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15263 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15264 d.vkCreateMacOSSurfaceMVK( m_instance, 15265 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 15266 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15267 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15268 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); 15269 15270 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15271 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 15272 } 15273 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15274 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15275 #endif /*VK_USE_PLATFORM_MACOS_MVK*/ 15276 15277 //=== VK_EXT_debug_utils === 
15278 15279 template <typename Dispatch> setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,Dispatch const & d) const15280 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, 15281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15282 { 15283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15284 return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) ); 15285 } 15286 15287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15288 template <typename Dispatch> 15289 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo,Dispatch const & d) const15290 Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 15291 { 15292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15293 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15294 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" ); 15295 # endif 15296 15297 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15298 d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) ); 15299 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); 15300 15301 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 15302 } 15303 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15304 15305 template <typename Dispatch> setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,Dispatch const & d) const15306 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, 15307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15308 { 15309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15310 return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) ); 15311 } 15312 15313 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15314 template <typename Dispatch> 15315 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo,Dispatch const & d) const15316 Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 15317 { 15318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15320 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" ); 15321 # endif 15322 15323 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15324 d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) ); 15325 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); 15326 15327 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 15328 } 15329 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15330 15331 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15332 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15334 { 15335 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15336 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15337 } 15338 15339 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15340 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15341 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15342 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15343 { 15344 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15345 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15346 VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15347 # endif 15348 15349 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15350 } 15351 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15352 15353 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const15354 VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15355 { 15356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15357 d.vkQueueEndDebugUtilsLabelEXT( m_queue ); 15358 } 15359 15360 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15361 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15362 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15363 { 15364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15365 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15366 } 15367 15368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15369 template <typename Dispatch> insertDebugUtilsLabelEXT(const 
VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15370 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15371 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15372 { 15373 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15374 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15375 VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15376 # endif 15377 15378 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15379 } 15380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15381 15382 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15383 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15384 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15385 { 15386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15387 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15388 } 15389 15390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15391 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15392 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15393 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15394 { 15395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15396 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15397 VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15398 # endif 15399 15400 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, 
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15401 } 15402 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15403 15404 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const15405 VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15406 { 15407 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15408 d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); 15409 } 15410 15411 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15412 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15413 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15414 { 15415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15416 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15417 } 15418 15419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15420 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15421 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15422 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15423 { 15424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15425 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15426 VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15427 # endif 15428 15429 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15430 } 15431 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15432 15433 template <typename Dispatch> 15434 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugUtilsMessengerEXT(const 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,Dispatch const & d) const15435 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, 15436 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15437 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, 15438 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15439 { 15440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15441 return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, 15442 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), 15443 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15444 reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) ); 15445 } 15446 15447 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15448 template <typename Dispatch> 15449 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15450 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 15451 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15452 Dispatch const & d ) const 15453 { 15454 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15455 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15456 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15457 # endif 15458 15459 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 15460 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateDebugUtilsMessengerEXT( 15461 m_instance, 15462 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 15463 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15464 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 15465 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); 15466 15467 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); 15468 } 15469 15470 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15471 template <typename Dispatch> 15472 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15473 Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 15474 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15475 Dispatch const & d ) const 15476 { 15477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15478 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15479 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15480 # endif 15481 15482 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 15483 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT( 15484 m_instance, 15485 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 15486 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15487 
reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 15488 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); 15489 15490 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15491 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 15492 } 15493 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15495 15496 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15497 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 15498 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15500 { 15501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15502 d.vkDestroyDebugUtilsMessengerEXT( 15503 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15504 } 15505 15506 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15507 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15508 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 15509 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15510 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15511 { 15512 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15514 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function 
<vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15515 # endif 15516 15517 d.vkDestroyDebugUtilsMessengerEXT( 15518 m_instance, 15519 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 15520 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15521 } 15522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15523 15524 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15525 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 15526 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15528 { 15529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15530 d.vkDestroyDebugUtilsMessengerEXT( 15531 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15532 } 15533 15534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15535 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15536 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 15537 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15538 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15539 { 15540 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15541 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15542 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15543 # endif 15544 15545 d.vkDestroyDebugUtilsMessengerEXT( 15546 m_instance, 15547 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 15548 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15549 } 15550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15551 15552 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,Dispatch const & d) const15553 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 15554 VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 15555 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, 15556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15557 { 15558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15559 d.vkSubmitDebugUtilsMessageEXT( m_instance, 15560 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 15561 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 15562 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) ); 15563 } 15564 15565 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15566 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,Dispatch const & d) const15567 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 15568 VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 15569 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, 15570 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15571 { 15572 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
15573 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15574 VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" ); 15575 # endif 15576 15577 d.vkSubmitDebugUtilsMessageEXT( m_instance, 15578 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 15579 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 15580 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) ); 15581 } 15582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15583 15584 #if defined( VK_USE_PLATFORM_ANDROID_KHR ) 15585 //=== VK_ANDROID_external_memory_android_hardware_buffer === 15586 15587 template <typename Dispatch> 15588 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer * buffer,VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,Dispatch const & d) const15589 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, 15590 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, 15591 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15592 { 15593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15594 return static_cast<Result>( 15595 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) ); 15596 } 15597 15598 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15599 template <typename Dispatch> 15600 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const15601 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 15602 { 15603 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15604 # if 
( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15605 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 15606 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 15607 # endif 15608 15609 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; 15610 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15611 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 15612 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 15613 15614 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15615 } 15616 15617 template <typename X, typename Y, typename... Z, typename Dispatch> 15618 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const15619 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 15620 { 15621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15622 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15623 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 15624 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 15625 # endif 15626 15627 StructureChain<X, Y, Z...> structureChain; 15628 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = 15629 structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>(); 15630 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15631 
d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 15632 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 15633 15634 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 15635 } 15636 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15637 15638 template <typename Dispatch> 15639 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer,Dispatch const & d) const15640 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, 15641 struct AHardwareBuffer ** pBuffer, 15642 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15643 { 15644 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15645 return static_cast<Result>( 15646 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) ); 15647 } 15648 15649 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15650 template <typename Dispatch> 15651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info,Dispatch const & d) const15652 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const 15653 { 15654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15655 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15656 VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && 15657 "Function <vkGetMemoryAndroidHardwareBufferANDROID> requires 
<VK_ANDROID_external_memory_android_hardware_buffer>" ); 15658 # endif 15659 15660 struct AHardwareBuffer * buffer; 15661 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15662 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) ); 15663 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); 15664 15665 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 15666 } 15667 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15668 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 15669 15670 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 15671 //=== VK_AMDX_shader_enqueue === 15672 15673 template <typename Dispatch> 15674 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const15675 Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15676 uint32_t createInfoCount, 15677 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, 15678 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15679 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 15680 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15681 { 15682 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15683 return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, 15684 static_cast<VkPipelineCache>( pipelineCache ), 15685 createInfoCount, 15686 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ), 15687 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15688 
reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 15689 } 15690 15691 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15692 template <typename PipelineAllocator, typename Dispatch> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15693 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 15694 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15695 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15696 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15697 Dispatch const & d ) const 15698 { 15699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15700 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15701 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15702 # endif 15703 15704 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 15705 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15706 m_device, 15707 static_cast<VkPipelineCache>( pipelineCache ), 15708 createInfos.size(), 15709 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15710 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15711 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15712 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15713 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 15714 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15715 15716 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 15717 } 15718 15719 template <typename PipelineAllocator, 15720 typename Dispatch, 15721 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const15722 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 15723 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15724 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15725 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15726 PipelineAllocator & pipelineAllocator, 15727 Dispatch const & d ) const 15728 { 15729 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15730 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15731 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15732 # endif 15733 15734 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 15735 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15736 m_device, 15737 static_cast<VkPipelineCache>( pipelineCache ), 15738 createInfos.size(), 15739 reinterpret_cast<const 
VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15740 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15741 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15742 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15743 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 15744 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15745 15746 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 15747 } 15748 15749 template <typename Dispatch> 15750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createExecutionGraphPipelineAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15751 Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15752 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 15753 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15754 Dispatch const & d ) const 15755 { 15756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15757 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15758 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15759 # endif 15760 15761 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 15762 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15763 m_device, 15764 static_cast<VkPipelineCache>( pipelineCache ), 15765 1, 15766 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 15767 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15768 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 15769 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15770 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", 15771 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15772 15773 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 15774 } 15775 15776 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15777 template <typename Dispatch, typename PipelineAllocator> 15778 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15779 Device::createExecutionGraphPipelinesAMDXUnique( 15780 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15781 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15782 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15783 Dispatch const & d ) const 15784 { 15785 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15786 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15787 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15788 # endif 15789 15790 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 15791 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15792 m_device, 15793 static_cast<VkPipelineCache>( pipelineCache 
), 15794 createInfos.size(), 15795 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15796 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15797 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15798 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15799 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 15800 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15801 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 15802 uniquePipelines.reserve( createInfos.size() ); 15803 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 15804 for ( auto const & pipeline : pipelines ) 15805 { 15806 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 15807 } 15808 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 15809 } 15810 15811 template < 15812 typename Dispatch, 15813 typename PipelineAllocator, 15814 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 15815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const15816 Device::createExecutionGraphPipelinesAMDXUnique( 15817 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15818 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15819 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15820 PipelineAllocator & pipelineAllocator, 15821 Dispatch const & d ) const 15822 { 15823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15824 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15825 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15826 # endif 15827 15828 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 15829 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15830 m_device, 15831 static_cast<VkPipelineCache>( pipelineCache ), 15832 createInfos.size(), 15833 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15834 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15835 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15836 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15837 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 15838 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15839 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 15840 uniquePipelines.reserve( createInfos.size() ); 15841 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 15842 for ( auto const & pipeline : pipelines ) 15843 { 15844 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 15845 } 15846 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 15847 } 
15848 15849 template <typename Dispatch> 15850 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15851 Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15852 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 15853 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15854 Dispatch const & d ) const 15855 { 15856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15857 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15858 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15859 # endif 15860 15861 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 15862 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15863 m_device, 15864 static_cast<VkPipelineCache>( pipelineCache ), 15865 1, 15866 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 15867 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15868 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 15869 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15870 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", 15871 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15872 15873 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 15874 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, 
Dispatch>( *this, allocator, d ) ) ); 15875 } 15876 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15877 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15878 15879 template <typename Dispatch> 15880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,Dispatch const & d) const15881 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 15882 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, 15883 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15884 { 15885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15886 return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 15887 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) ); 15888 } 15889 15890 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15891 template <typename Dispatch> 15892 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,Dispatch const & d) const15893 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const 15894 { 15895 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15896 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15897 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && 15898 "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" ); 15899 # endif 15900 15901 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; 15902 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 15903 m_device, 
static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) ); 15904 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); 15905 15906 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); 15907 } 15908 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15909 15910 template <typename Dispatch> 15911 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,uint32_t * pNodeIndex,Dispatch const & d) const15912 Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 15913 const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, 15914 uint32_t * pNodeIndex, 15915 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15916 { 15917 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15918 return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 15919 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) ); 15920 } 15921 15922 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15923 template <typename Dispatch> getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,Dispatch const & d) const15924 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX( 15925 VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const 15926 { 15927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15928 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15929 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" ); 15930 # endif 15931 15932 uint32_t nodeIndex; 15933 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 15934 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) ); 15935 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); 15936 15937 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); 15938 } 15939 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15940 15941 template <typename Dispatch> initializeGraphScratchMemoryAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,Dispatch const & d) const15942 VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15944 { 15945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15946 d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) ); 15947 } 15948 15949 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const15950 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15951 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 15952 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15953 { 15954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15955 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const 
VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 15956 } 15957 15958 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15959 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const15960 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15961 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 15962 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15963 { 15964 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15965 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15966 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" ); 15967 # endif 15968 15969 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 15970 } 15971 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15972 15973 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const15974 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15975 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 15976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15977 { 15978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15979 d.vkCmdDispatchGraphIndirectAMDX( 15980 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 15981 } 15982 15983 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15984 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) 
const15985 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15986 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 15987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15988 { 15989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15990 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15991 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" ); 15992 # endif 15993 15994 d.vkCmdDispatchGraphIndirectAMDX( 15995 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 15996 } 15997 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15998 15999 template <typename Dispatch> dispatchGraphIndirectCountAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,Dispatch const & d) const16000 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16001 VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, 16002 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16003 { 16004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16005 d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) ); 16006 } 16007 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 16008 16009 //=== VK_EXT_sample_locations === 16010 16011 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,Dispatch const & d) const16012 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, 16013 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16014 { 16015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16016 d.vkCmdSetSampleLocationsEXT( 
m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) ); 16017 } 16018 16019 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16020 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,Dispatch const & d) const16021 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, 16022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16023 { 16024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16025 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16026 VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" ); 16027 # endif 16028 16029 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) ); 16030 } 16031 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16032 16033 template <typename Dispatch> getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,Dispatch const & d) const16034 VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 16035 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, 16036 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16037 { 16038 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16039 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 16040 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) ); 16041 } 16042 16043 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16044 template <typename Dispatch> 16045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits 
samples,Dispatch const & d) const16046 PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16047 { 16048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16050 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT && 16051 "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" ); 16052 # endif 16053 16054 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; 16055 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 16056 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) ); 16057 16058 return multisampleProperties; 16059 } 16060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16061 16062 //=== VK_KHR_get_memory_requirements2 === 16063 16064 template <typename Dispatch> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const16065 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 16066 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 16067 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16068 { 16069 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16070 d.vkGetImageMemoryRequirements2KHR( 16071 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 16072 } 16073 16074 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16075 template <typename Dispatch> 16076 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) 
const16077 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16078 { 16079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16080 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16081 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 16082 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16083 # endif 16084 16085 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 16086 d.vkGetImageMemoryRequirements2KHR( 16087 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16088 16089 return memoryRequirements; 16090 } 16091 16092 template <typename X, typename Y, typename... Z, typename Dispatch> 16093 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const16094 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16095 { 16096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16097 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16098 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 16099 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16100 # endif 16101 16102 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 16103 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 16104 d.vkGetImageMemoryRequirements2KHR( 16105 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements 
) ); 16106 16107 return structureChain; 16108 } 16109 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16110 16111 template <typename Dispatch> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const16112 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 16113 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 16114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16115 { 16116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16117 d.vkGetBufferMemoryRequirements2KHR( 16118 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 16119 } 16120 16121 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16122 template <typename Dispatch> 16123 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const16124 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16125 { 16126 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16127 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16128 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 16129 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16130 # endif 16131 16132 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 16133 d.vkGetBufferMemoryRequirements2KHR( 16134 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16135 16136 return memoryRequirements; 16137 } 16138 16139 template 
<typename X, typename Y, typename... Z, typename Dispatch> 16140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const16141 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16142 { 16143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16144 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16145 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 16146 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16147 # endif 16148 16149 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 16150 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 16151 d.vkGetBufferMemoryRequirements2KHR( 16152 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16153 16154 return structureChain; 16155 } 16156 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16157 16158 template <typename Dispatch> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const16159 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 16160 uint32_t * pSparseMemoryRequirementCount, 16161 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 16162 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16163 { 16164 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16165 
d.vkGetImageSparseMemoryRequirements2KHR( m_device, 16166 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 16167 pSparseMemoryRequirementCount, 16168 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 16169 } 16170 16171 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16172 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 16173 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const16174 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 16175 { 16176 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16177 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16178 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 16179 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16180 # endif 16181 16182 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 16183 uint32_t sparseMemoryRequirementCount; 16184 d.vkGetImageSparseMemoryRequirements2KHR( 16185 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 16186 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16187 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 16188 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 16189 &sparseMemoryRequirementCount, 16190 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 16191 16192 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 16193 if 
( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 16194 { 16195 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16196 } 16197 return sparseMemoryRequirements; 16198 } 16199 16200 template <typename SparseImageMemoryRequirements2Allocator, 16201 typename Dispatch, 16202 typename std::enable_if< 16203 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 16204 int>::type> 16205 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const16206 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 16207 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 16208 Dispatch const & d ) const 16209 { 16210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16211 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16212 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 16213 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16214 # endif 16215 16216 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 16217 sparseImageMemoryRequirements2Allocator ); 16218 uint32_t sparseMemoryRequirementCount; 16219 d.vkGetImageSparseMemoryRequirements2KHR( 16220 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 16221 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16222 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 16223 
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 16224 &sparseMemoryRequirementCount, 16225 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 16226 16227 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 16228 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 16229 { 16230 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16231 } 16232 return sparseMemoryRequirements; 16233 } 16234 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16235 16236 //=== VK_KHR_acceleration_structure === 16237 16238 template <typename Dispatch> 16239 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,Dispatch const & d) const16240 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, 16241 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16242 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, 16243 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16244 { 16245 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16246 return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, 16247 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), 16248 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16249 reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) ); 16250 } 16251 16252 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16253 template <typename Dispatch> 16254 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR(const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16255 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 16256 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16257 Dispatch const & d ) const 16258 { 16259 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16260 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16261 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16262 # endif 16263 16264 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 16265 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 16266 m_device, 16267 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 16268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16269 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 16270 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); 16271 16272 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); 16273 } 16274 16275 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16276 template <typename Dispatch> 16277 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16278 Device::createAccelerationStructureKHRUnique( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 16279 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16280 Dispatch const & d ) const 16281 { 16282 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16283 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16284 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16285 # endif 16286 16287 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 16288 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 16289 m_device, 16290 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 16291 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16292 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 16293 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); 16294 16295 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 16296 result, 16297 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 16298 } 16299 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 16300 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16301 16302 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16303 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 16304 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16305 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16306 { 16307 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16308 d.vkDestroyAccelerationStructureKHR( 16309 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16310 } 16311 16312 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16313 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16314 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 16315 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16316 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16317 { 16318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16320 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16321 # endif 16322 16323 d.vkDestroyAccelerationStructureKHR( 16324 m_device, 16325 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 16326 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16327 } 16328 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16329 16330 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16331 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 16332 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16334 { 16335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16336 
d.vkDestroyAccelerationStructureKHR( 16337 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16338 } 16339 16340 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16341 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16342 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 16343 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16344 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16345 { 16346 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16347 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16348 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16349 # endif 16350 16351 d.vkDestroyAccelerationStructureKHR( 16352 m_device, 16353 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 16354 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16355 } 16356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16357 16358 template <typename Dispatch> 16359 VULKAN_HPP_INLINE void buildAccelerationStructuresKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const16360 CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, 16361 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 16362 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 16363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16364 { 16365 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16366 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 16367 infoCount, 16368 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 16369 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ); 16370 } 16371 16372 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16373 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const16374 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( 16375 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 16376 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 16377 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 16378 { 16379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16380 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16381 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); 16382 # endif 16383 # ifdef VULKAN_HPP_NO_EXCEPTIONS 16384 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 16385 # else 16386 if ( infos.size() != pBuildRangeInfos.size() ) 16387 { 16388 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 16389 } 16390 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16391 16392 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 16393 infos.size(), 16394 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() 
), 16395 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ); 16396 } 16397 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16398 16399 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,const uint32_t * pIndirectStrides,const uint32_t * const * ppMaxPrimitiveCounts,Dispatch const & d) const16400 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, 16401 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 16402 const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, 16403 const uint32_t * pIndirectStrides, 16404 const uint32_t * const * ppMaxPrimitiveCounts, 16405 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16406 { 16407 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16408 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 16409 infoCount, 16410 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 16411 reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), 16412 pIndirectStrides, 16413 ppMaxPrimitiveCounts ); 16414 } 16415 16416 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16417 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,Dispatch const & d) const16418 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( 16419 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 16420 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, 16421 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, 16422 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, 16423 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 16424 { 16425 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16426 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16427 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR && 16428 "Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" ); 16429 # endif 16430 # ifdef VULKAN_HPP_NO_EXCEPTIONS 16431 VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); 16432 VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); 16433 VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); 16434 # else 16435 if ( infos.size() != indirectDeviceAddresses.size() ) 16436 { 16437 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); 16438 } 16439 if ( infos.size() != indirectStrides.size() ) 16440 { 16441 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); 16442 } 16443 if ( infos.size() != pMaxPrimitiveCounts.size() ) 16444 { 16445 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); 16446 } 16447 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16448 16449 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 16450 infos.size(), 16451 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 16452 reinterpret_cast<const VkDeviceAddress 
*>( indirectDeviceAddresses.data() ), 16453 indirectStrides.data(), 16454 pMaxPrimitiveCounts.data() ); 16455 } 16456 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16457 16458 template <typename Dispatch> 16459 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const16460 Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16461 uint32_t infoCount, 16462 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 16463 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 16464 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16465 { 16466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16467 return static_cast<Result>( 16468 d.vkBuildAccelerationStructuresKHR( m_device, 16469 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16470 infoCount, 16471 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 16472 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) ); 16473 } 16474 16475 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16476 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const16477 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( 16478 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16479 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 16480 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 16481 Dispatch const & d ) const 16482 { 16483 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16484 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16485 VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); 16486 # endif 16487 # ifdef VULKAN_HPP_NO_EXCEPTIONS 16488 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 16489 # else 16490 if ( infos.size() != pBuildRangeInfos.size() ) 16491 { 16492 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 16493 } 16494 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16495 16496 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16497 d.vkBuildAccelerationStructuresKHR( m_device, 16498 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16499 infos.size(), 16500 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 16501 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) ); 16502 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16503 result, 16504 VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", 16505 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16506 16507 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16508 } 16509 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16510 16511 template <typename Dispatch> 
copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16513 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 16514 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16515 { 16516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16517 return static_cast<Result>( d.vkCopyAccelerationStructureKHR( 16518 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) ); 16519 } 16520 16521 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16522 template <typename Dispatch> 16523 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const16524 Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16525 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 16526 Dispatch const & d ) const 16527 { 16528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16529 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16530 VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16531 # endif 16532 16533 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureKHR( 16534 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) ); 16535 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16536 result, 16537 VULKAN_HPP_NAMESPACE_STRING 
"::Device::copyAccelerationStructureKHR", 16538 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16539 16540 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16541 } 16542 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16543 16544 template <typename Dispatch> 16545 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const16546 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16547 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 16548 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16549 { 16550 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16551 return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( 16552 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) ); 16553 } 16554 16555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16556 template <typename Dispatch> 16557 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const16558 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16559 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 16560 Dispatch const & d ) const 16561 { 16562 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16563 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16564 VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR && 
16565 "Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); 16566 # endif 16567 16568 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureToMemoryKHR( 16569 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) ); 16570 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16571 result, 16572 VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", 16573 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16574 16575 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16576 } 16577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16578 16579 template <typename Dispatch> 16580 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16581 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16582 const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, 16583 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16584 { 16585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16586 return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( 16587 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) ); 16588 } 16589 16590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16591 template <typename Dispatch> 16592 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const 
VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,Dispatch const & d) const16593 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16594 const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, 16595 Dispatch const & d ) const 16596 { 16597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16599 VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR && 16600 "Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16601 # endif 16602 16603 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToAccelerationStructureKHR( 16604 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) ); 16605 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16606 result, 16607 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", 16608 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16609 16610 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16611 } 16612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16613 16614 template <typename Dispatch> 16615 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const16616 Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 16617 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 16618 VULKAN_HPP_NAMESPACE::QueryType queryType, 16619 size_t dataSize, 16620 void * 
pData, 16621 size_t stride, 16622 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16623 { 16624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16625 return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16626 accelerationStructureCount, 16627 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 16628 static_cast<VkQueryType>( queryType ), 16629 dataSize, 16630 pData, 16631 stride ) ); 16632 } 16633 16634 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16635 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 16636 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const16637 Device::writeAccelerationStructuresPropertiesKHR( 16638 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16639 VULKAN_HPP_NAMESPACE::QueryType queryType, 16640 size_t dataSize, 16641 size_t stride, 16642 Dispatch const & d ) const 16643 { 16644 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16645 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16646 VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && 16647 "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16648 # endif 16649 16650 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16651 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16652 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16653 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16654 accelerationStructures.size(), 16655 reinterpret_cast<const 
VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16656 static_cast<VkQueryType>( queryType ), 16657 data.size() * sizeof( DataType ), 16658 reinterpret_cast<void *>( data.data() ), 16659 stride ) ); 16660 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); 16661 16662 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16663 } 16664 16665 template <typename DataType, typename Dispatch> writeAccelerationStructuresPropertyKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const16666 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR( 16667 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16668 VULKAN_HPP_NAMESPACE::QueryType queryType, 16669 size_t stride, 16670 Dispatch const & d ) const 16671 { 16672 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16673 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16674 VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && 16675 "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16676 # endif 16677 16678 DataType data; 16679 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16680 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16681 accelerationStructures.size(), 16682 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16683 static_cast<VkQueryType>( queryType ), 16684 sizeof( DataType ), 16685 reinterpret_cast<void *>( &data ), 16686 stride ) ); 16687 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::writeAccelerationStructuresPropertyKHR" ); 16688 16689 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16690 } 16691 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16692 16693 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16694 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 16695 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16696 { 16697 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16698 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ); 16699 } 16700 16701 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16702 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const16703 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 16704 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16705 { 16706 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16707 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16708 VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16709 # endif 16710 16711 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ); 16712 } 16713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16714 16715 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const16716 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 16717 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16718 { 16719 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16720 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ); 16721 } 16722 16723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16724 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const16725 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 16726 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16727 { 16728 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16729 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16730 VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR && 16731 "Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); 16732 # endif 16733 16734 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ); 16735 } 16736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16737 16738 template <typename Dispatch> copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16739 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, 16740 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16741 { 16742 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16743 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ); 16744 } 16745 16746 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 16747 template <typename Dispatch> copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,Dispatch const & d) const16748 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, 16749 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16750 { 16751 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16752 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16753 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR && 16754 "Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16755 # endif 16756 16757 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ); 16758 } 16759 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16760 16761 template <typename Dispatch> getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,Dispatch const & d) const16762 VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, 16763 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16764 { 16765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16766 return static_cast<DeviceAddress>( 16767 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) ); 16768 } 16769 16770 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16771 template <typename Dispatch> 16772 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,Dispatch const & d) const16773 Device::getAccelerationStructureAddressKHR( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, 16774 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16775 { 16776 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16777 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16778 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && 16779 "Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" ); 16780 # endif 16781 16782 VkDeviceAddress result = 16783 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) ); 16784 16785 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 16786 } 16787 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16788 16789 template <typename Dispatch> 16790 VULKAN_HPP_INLINE void writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16791 CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 16792 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 16793 VULKAN_HPP_NAMESPACE::QueryType queryType, 16794 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16795 uint32_t firstQuery, 16796 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16797 { 16798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16799 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, 16800 accelerationStructureCount, 16801 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 16802 static_cast<VkQueryType>( queryType ), 16803 static_cast<VkQueryPool>( queryPool ), 16804 firstQuery ); 16805 } 16806 16807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16808 template <typename Dispatch> 
writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16809 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( 16810 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16811 VULKAN_HPP_NAMESPACE::QueryType queryType, 16812 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16813 uint32_t firstQuery, 16814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16815 { 16816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16818 VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && 16819 "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16820 # endif 16821 16822 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, 16823 accelerationStructures.size(), 16824 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16825 static_cast<VkQueryType>( queryType ), 16826 static_cast<VkQueryPool>( queryPool ), 16827 firstQuery ); 16828 } 16829 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16830 16831 template <typename Dispatch> getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const16832 VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, 16833 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 16834 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16835 { 16836 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16837 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, 16838 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), 16839 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); 16840 } 16841 16842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16843 template <typename Dispatch> 16844 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,Dispatch const & d) const16845 Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, 16846 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16847 { 16848 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16849 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16850 VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && 16851 "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" ); 16852 # endif 16853 16854 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 16855 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, 16856 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), 16857 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 16858 16859 return compatibility; 16860 } 16861 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16862 16863 template <typename Dispatch> getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,const uint32_t * pMaxPrimitiveCounts,VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,Dispatch const & d) const16864 VULKAN_HPP_INLINE void 
Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 16865 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, 16866 const uint32_t * pMaxPrimitiveCounts, 16867 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, 16868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16869 { 16870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16871 d.vkGetAccelerationStructureBuildSizesKHR( m_device, 16872 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 16873 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), 16874 pMaxPrimitiveCounts, 16875 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); 16876 } 16877 16878 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16879 template <typename Dispatch> 16880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,Dispatch const & d) const16881 Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 16882 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, 16883 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts, 16884 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 16885 { 16886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16887 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16888 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && 16889 "Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" ); 16890 # endif 16891 # ifdef 
VULKAN_HPP_NO_EXCEPTIONS 16892 VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); 16893 # else 16894 if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) 16895 { 16896 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); 16897 } 16898 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16899 16900 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; 16901 d.vkGetAccelerationStructureBuildSizesKHR( m_device, 16902 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 16903 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), 16904 maxPrimitiveCounts.data(), 16905 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); 16906 16907 return sizeInfo; 16908 } 16909 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16910 16911 //=== VK_KHR_ray_tracing_pipeline === 16912 16913 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const16914 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 16915 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 16916 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 16917 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 16918 uint32_t width, 16919 uint32_t height, 16920 uint32_t depth, 16921 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16922 { 16923 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16924 d.vkCmdTraceRaysKHR( m_commandBuffer, 16925 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 16926 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 16927 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 16928 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 16929 width, 16930 height, 16931 depth ); 16932 } 16933 16934 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16935 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const16936 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 16937 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, 16938 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, 16939 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, 16940 uint32_t width, 16941 uint32_t height, 16942 uint32_t depth, 16943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16944 { 16945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16946 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16947 VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16948 # endif 16949 16950 d.vkCmdTraceRaysKHR( m_commandBuffer, 16951 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 16952 
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 16953 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 16954 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 16955 width, 16956 height, 16957 depth ); 16958 } 16959 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16960 16961 template <typename Dispatch> 16962 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const16963 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16964 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16965 uint32_t createInfoCount, 16966 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, 16967 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16968 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 16969 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16970 { 16971 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16972 return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, 16973 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16974 static_cast<VkPipelineCache>( pipelineCache ), 16975 createInfoCount, 16976 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), 16977 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16978 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 16979 } 16980 16981 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16982 template <typename PipelineAllocator, typename Dispatch> 16983 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16984 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16985 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16986 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 16987 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16988 Dispatch const & d ) const 16989 { 16990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16991 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16992 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16993 # endif 16994 16995 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 16996 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16997 m_device, 16998 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16999 static_cast<VkPipelineCache>( pipelineCache ), 17000 createInfos.size(), 17001 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17002 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17003 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17004 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17005 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17006 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17007 
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17008 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17009 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17010 17011 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 17012 } 17013 17014 template <typename PipelineAllocator, 17015 typename Dispatch, 17016 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 17017 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17018 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17019 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17020 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17021 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17022 PipelineAllocator & pipelineAllocator, 17023 Dispatch const & d ) const 17024 { 17025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17026 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17027 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17028 # endif 17029 17030 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 17031 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateRayTracingPipelinesKHR( 17032 m_device, 17033 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17034 static_cast<VkPipelineCache>( pipelineCache ), 17035 createInfos.size(), 17036 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17037 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17038 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17039 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17040 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17041 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17042 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17043 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17044 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17045 17046 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 17047 } 17048 17049 template <typename Dispatch> 17050 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17051 Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17052 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17053 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 17054 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17055 Dispatch const & d ) const 17056 { 17057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17059 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function 
<vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17060 # endif 17061 17062 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 17063 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17064 m_device, 17065 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17066 static_cast<VkPipelineCache>( pipelineCache ), 17067 1, 17068 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 17069 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17070 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 17071 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17072 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", 17073 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17074 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17075 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17076 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17077 17078 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 17079 } 17080 17081 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17082 template <typename Dispatch, typename PipelineAllocator> 17083 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17084 Device::createRayTracingPipelinesKHRUnique( 17085 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17086 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17087 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17088 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17089 Dispatch const & d ) const 17090 { 17091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17092 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17093 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17094 # endif 17095 17096 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 17097 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17098 m_device, 17099 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17100 static_cast<VkPipelineCache>( pipelineCache ), 17101 createInfos.size(), 17102 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17103 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17104 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17105 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17106 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17107 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17108 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17109 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17110 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17111 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 17112 uniquePipelines.reserve( createInfos.size() ); 17113 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17114 for ( auto const & pipeline : pipelines ) 17115 { 17116 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 17117 } 17118 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, 
PipelineAllocator>>( result, std::move( uniquePipelines ) ); 17119 } 17120 17121 template < 17122 typename Dispatch, 17123 typename PipelineAllocator, 17124 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 17125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17126 Device::createRayTracingPipelinesKHRUnique( 17127 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17128 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17129 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17130 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17131 PipelineAllocator & pipelineAllocator, 17132 Dispatch const & d ) const 17133 { 17134 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17135 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17136 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17137 # endif 17138 17139 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 17140 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17141 m_device, 17142 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17143 static_cast<VkPipelineCache>( pipelineCache ), 17144 createInfos.size(), 17145 reinterpret_cast<const 
VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17146 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17147 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17148 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17149 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17150 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17151 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17152 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17153 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17154 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 17155 uniquePipelines.reserve( createInfos.size() ); 17156 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17157 for ( auto const & pipeline : pipelines ) 17158 { 17159 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 17160 } 17161 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 17162 } 17163 17164 template <typename Dispatch> 17165 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17166 Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17167 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17168 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 17169 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17170 
Dispatch const & d ) const 17171 { 17172 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17173 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17174 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17175 # endif 17176 17177 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 17178 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17179 m_device, 17180 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17181 static_cast<VkPipelineCache>( pipelineCache ), 17182 1, 17183 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 17184 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17185 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 17186 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17187 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", 17188 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17189 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17190 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17191 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17192 17193 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 17194 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17195 } 17196 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17198 17199 template <typename Dispatch> getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const17200 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17201 uint32_t firstGroup, 
17202 uint32_t groupCount, 17203 size_t dataSize, 17204 void * pData, 17205 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17206 { 17207 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17208 return static_cast<Result>( 17209 d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 17210 } 17211 17212 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17213 template <typename DataType, typename DataTypeAllocator, typename Dispatch> getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const17214 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR( 17215 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 17216 { 17217 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17218 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17219 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && 17220 "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 17221 # endif 17222 17223 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 17224 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 17225 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR( 17226 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 17227 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); 17228 17229 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17230 } 17231 17232 
template <typename DataType, typename Dispatch> 17233 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const17234 Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 17235 { 17236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17237 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17238 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && 17239 "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 17240 # endif 17241 17242 DataType data; 17243 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR( 17244 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 17245 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); 17246 17247 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17248 } 17249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17250 17251 template <typename Dispatch> getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const17252 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17253 uint32_t firstGroup, 17254 uint32_t groupCount, 17255 size_t dataSize, 17256 void * pData, 17257 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17258 { 17259 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17260 return static_cast<Result>( 17261 
d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 17262 } 17263 17264 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17265 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 17266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const17267 Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( 17268 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 17269 { 17270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17271 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17272 VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && 17273 "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17274 # endif 17275 17276 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 17277 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 17278 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 17279 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 17280 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); 17281 17282 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17283 } 17284 17285 template <typename DataType, typename Dispatch> getRayTracingCaptureReplayShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t 
groupCount,Dispatch const & d) const17286 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( 17287 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 17288 { 17289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17290 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17291 VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && 17292 "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17293 # endif 17294 17295 DataType data; 17296 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 17297 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 17298 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); 17299 17300 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17301 } 17302 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17303 17304 template <typename Dispatch> traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const17305 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 17306 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 17307 const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 17308 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 17309 VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 17310 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17311 { 17312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17313 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, 17314 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 17315 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 17316 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 17317 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 17318 static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 17319 } 17320 17321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17322 template <typename Dispatch> traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const17323 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 17324 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, 17325 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, 17326 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, 17327 VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 17328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17329 { 17330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 17331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17332 VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17333 # endif 17334 17335 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, 17336 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 17337 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 17338 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 17339 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 17340 static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 17341 } 17342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17343 17344 template <typename Dispatch> getRayTracingShaderGroupStackSizeKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t group,VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,Dispatch const & d) const17345 VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17346 uint32_t group, 17347 VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, 17348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17349 { 17350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17351 return static_cast<DeviceSize>( 17352 d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) ); 17353 } 17354 17355 template <typename Dispatch> setRayTracingPipelineStackSizeKHR(uint32_t pipelineStackSize,Dispatch const & d) const17356 VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17357 { 17358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17359 d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); 17360 } 17361 
17362 //=== VK_KHR_sampler_ycbcr_conversion === 17363 17364 template <typename Dispatch> 17365 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,Dispatch const & d) const17366 Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, 17367 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17368 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, 17369 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17370 { 17371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17372 return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, 17373 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), 17374 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17375 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); 17376 } 17377 17378 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17379 template <typename Dispatch> 17380 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17381 Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 17382 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17383 Dispatch const & d ) const 17384 { 17385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17386 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17387 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && 17388 "Function <vkCreateSamplerYcbcrConversionKHR> 
requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 17389 # endif 17390 17391 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 17392 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 17393 m_device, 17394 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 17395 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17396 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 17397 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); 17398 17399 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); 17400 } 17401 17402 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17403 template <typename Dispatch> 17404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17405 Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 17406 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17407 Dispatch const & d ) const 17408 { 17409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17410 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17411 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && 17412 "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 17413 # endif 17414 17415 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 17416 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 17417 m_device, 17418 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 17419 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17420 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 17421 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); 17422 17423 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 17424 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17425 } 17426 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17427 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17428 17429 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17430 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 17431 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17432 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17433 { 17434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17435 d.vkDestroySamplerYcbcrConversionKHR( 17436 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17437 } 17438 17439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17440 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17441 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 17442 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17443 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17444 { 17445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17447 VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR && 17448 "Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 17449 # endif 17450 17451 d.vkDestroySamplerYcbcrConversionKHR( 17452 m_device, 17453 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 17454 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17455 } 17456 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17457 17458 //=== VK_KHR_bind_memory2 === 17459 17460 template <typename Dispatch> bindBufferMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const17461 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, 17462 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 17463 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17464 { 17465 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17466 return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 17467 } 17468 17469 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17470 template <typename Dispatch> 17471 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const17472 Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & 
bindInfos, 17473 Dispatch const & d ) const 17474 { 17475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17476 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17477 VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 17478 # endif 17479 17480 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17481 d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 17482 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); 17483 17484 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 17485 } 17486 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17487 17488 template <typename Dispatch> bindImageMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const17489 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, 17490 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 17491 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17492 { 17493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17494 return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 17495 } 17496 17497 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17498 template <typename Dispatch> 17499 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const17500 Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 17501 { 17502 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17503 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17504 VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 17505 # endif 17506 17507 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17508 d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 17509 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); 17510 17511 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 17512 } 17513 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17514 17515 //=== VK_EXT_image_drm_format_modifier === 17516 17517 template <typename Dispatch> getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,Dispatch const & d) const17518 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( 17519 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17520 { 17521 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17522 return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 17523 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); 17524 } 17525 17526 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17527 template <typename Dispatch> 17528 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const17529 Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 
17530 { 17531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17533 VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT && 17534 "Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" ); 17535 # endif 17536 17537 VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; 17538 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 17539 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) ); 17540 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); 17541 17542 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 17543 } 17544 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17545 17546 //=== VK_EXT_validation_cache === 17547 17548 template <typename Dispatch> createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,Dispatch const & d) const17549 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, 17550 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17551 VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, 17552 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17553 { 17554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17555 return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, 17556 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), 17557 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17558 reinterpret_cast<VkValidationCacheEXT *>( 
pValidationCache ) ) ); 17559 } 17560 17561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17562 template <typename Dispatch> 17563 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17564 Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 17565 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17566 Dispatch const & d ) const 17567 { 17568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17570 VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17571 # endif 17572 17573 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 17574 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 17575 m_device, 17576 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 17577 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17578 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 17579 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); 17580 17581 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( validationCache ) ); 17582 } 17583 17584 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17585 template <typename Dispatch> 17586 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & 
createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17587 Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 17588 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17589 Dispatch const & d ) const 17590 { 17591 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17592 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17593 VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17594 # endif 17595 17596 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 17597 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 17598 m_device, 17599 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 17600 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17601 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 17602 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); 17603 17604 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 17605 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17606 } 17607 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17608 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17609 17610 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17611 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17612 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17613 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 17614 { 17615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17616 d.vkDestroyValidationCacheEXT( 17617 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17618 } 17619 17620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17621 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17622 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17623 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17624 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17625 { 17626 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17627 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17628 VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17629 # endif 17630 17631 d.vkDestroyValidationCacheEXT( 17632 m_device, 17633 static_cast<VkValidationCacheEXT>( validationCache ), 17634 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17635 } 17636 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17637 17638 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17639 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17640 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17641 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17642 { 17643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17644 d.vkDestroyValidationCacheEXT( 17645 m_device, static_cast<VkValidationCacheEXT>( 
validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17646 } 17647 17648 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17649 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17650 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17651 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17653 { 17654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17655 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17656 VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17657 # endif 17658 17659 d.vkDestroyValidationCacheEXT( 17660 m_device, 17661 static_cast<VkValidationCacheEXT>( validationCache ), 17662 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17663 } 17664 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17665 17666 template <typename Dispatch> mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,uint32_t srcCacheCount,const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,Dispatch const & d) const17667 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 17668 uint32_t srcCacheCount, 17669 const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, 17670 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17671 { 17672 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17673 return static_cast<Result>( d.vkMergeValidationCachesEXT( 17674 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); 17675 } 17676 17677 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 17678 template <typename Dispatch> 17679 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,Dispatch const & d) const17680 Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 17681 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, 17682 Dispatch const & d ) const 17683 { 17684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17685 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17686 VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" ); 17687 # endif 17688 17689 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergeValidationCachesEXT( 17690 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) ); 17691 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); 17692 17693 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 17694 } 17695 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17696 17697 template <typename Dispatch> getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,size_t * pDataSize,void * pData,Dispatch const & d) const17698 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17699 size_t * pDataSize, 17700 void * pData, 17701 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17702 { 17703 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17704 return static_cast<Result>( d.vkGetValidationCacheDataEXT( 
m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); 17705 } 17706 17707 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17708 template <typename Uint8_tAllocator, typename Dispatch> 17709 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Dispatch const & d) const17710 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const 17711 { 17712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17713 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17714 VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); 17715 # endif 17716 17717 std::vector<uint8_t, Uint8_tAllocator> data; 17718 size_t dataSize; 17719 VULKAN_HPP_NAMESPACE::Result result; 17720 do 17721 { 17722 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17723 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 17724 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 17725 { 17726 data.resize( dataSize ); 17727 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17728 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 17729 } 17730 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17731 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 17732 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 17733 if ( dataSize < data.size() ) 17734 { 17735 data.resize( dataSize ); 17736 } 17737 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17738 } 17739 17740 template <typename Uint8_tAllocator, 17741 
typename Dispatch, 17742 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 17743 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const17744 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 17745 { 17746 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17747 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17748 VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); 17749 # endif 17750 17751 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 17752 size_t dataSize; 17753 VULKAN_HPP_NAMESPACE::Result result; 17754 do 17755 { 17756 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17757 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 17758 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 17759 { 17760 data.resize( dataSize ); 17761 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17762 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 17763 } 17764 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17765 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 17766 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 17767 if ( dataSize < data.size() ) 17768 { 17769 data.resize( dataSize ); 17770 } 17771 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17772 } 17773 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17774 17775 //=== VK_NV_shading_rate_image === 17776 17777 template <typename Dispatch> bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const17778 VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, 17779 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 17780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17781 { 17782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17783 d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 17784 } 17785 17786 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,Dispatch const & d) const17787 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, 17788 uint32_t viewportCount, 17789 const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, 17790 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17791 { 17792 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17793 d.vkCmdSetViewportShadingRatePaletteNV( 17794 m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); 17795 } 17796 17797 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17798 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,Dispatch const & d) const17799 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( 17800 uint32_t firstViewport, 17801 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, 17802 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 17803 { 17804 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17805 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17806 VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" ); 17807 # endif 17808 17809 d.vkCmdSetViewportShadingRatePaletteNV( 17810 m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) ); 17811 } 17812 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17813 17814 template <typename Dispatch> setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,uint32_t customSampleOrderCount,const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,Dispatch const & d) const17815 VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 17816 uint32_t customSampleOrderCount, 17817 const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, 17818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17819 { 17820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17821 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 17822 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 17823 customSampleOrderCount, 17824 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); 17825 } 17826 17827 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17828 template <typename Dispatch> 17829 VULKAN_HPP_INLINE void setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,Dispatch const & d) const17830 CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 17831 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, 17832 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17833 { 17834 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17835 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17836 VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" ); 17837 # endif 17838 17839 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 17840 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 17841 customSampleOrders.size(), 17842 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) ); 17843 } 17844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17845 17846 //=== VK_NV_ray_tracing === 17847 17848 template <typename Dispatch> 17849 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,Dispatch const & d) const17850 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, 17851 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17852 VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, 17853 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17854 { 17855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17856 return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, 17857 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), 17858 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17859 reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) ); 17860 } 17861 17862 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17863 template <typename Dispatch> 17864 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17865 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 17866 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17867 Dispatch const & d ) const 17868 { 17869 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17870 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17871 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17872 # endif 17873 17874 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 17875 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 17876 m_device, 17877 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 17878 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17879 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 17880 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); 17881 17882 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); 17883 } 17884 17885 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17886 template <typename Dispatch> 17887 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17888 
Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 17889 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17890 Dispatch const & d ) const 17891 { 17892 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17893 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17894 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17895 # endif 17896 17897 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 17898 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 17899 m_device, 17900 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 17901 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17902 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 17903 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); 17904 17905 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 17906 result, 17907 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17908 } 17909 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17911 17912 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17913 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17914 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17915 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 17916 { 17917 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17918 d.vkDestroyAccelerationStructureNV( 17919 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17920 } 17921 17922 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17923 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17924 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17925 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17926 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17927 { 17928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17929 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17930 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17931 # endif 17932 17933 d.vkDestroyAccelerationStructureNV( 17934 m_device, 17935 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 17936 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17937 } 17938 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17939 17940 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17941 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17942 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17944 { 17945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17946 
d.vkDestroyAccelerationStructureNV( 17947 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17948 } 17949 17950 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17951 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17952 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17953 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17954 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17955 { 17956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17957 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17958 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17959 # endif 17960 17961 d.vkDestroyAccelerationStructureNV( 17962 m_device, 17963 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 17964 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17965 } 17966 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17967 17968 template <typename Dispatch> 17969 VULKAN_HPP_INLINE void getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,Dispatch const & d) const17970 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, 17971 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, 17972 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17973 { 17974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17975 
d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 17976 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), 17977 reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) ); 17978 } 17979 17980 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17981 template <typename Dispatch> 17982 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const17983 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 17984 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17985 { 17986 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17987 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17988 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 17989 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 17990 # endif 17991 17992 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; 17993 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 17994 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 17995 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 17996 17997 return memoryRequirements; 17998 } 17999 18000 template <typename X, typename Y, typename... 
Z, typename Dispatch> 18001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const18002 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 18003 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18004 { 18005 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18006 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18007 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 18008 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 18009 # endif 18010 18011 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18012 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>(); 18013 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 18014 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 18015 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 18016 18017 return structureChain; 18018 } 18019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18020 18021 template <typename Dispatch> bindAccelerationStructureMemoryNV(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,Dispatch const & d) const18022 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( 18023 uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18024 { 18025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18026 return static_cast<Result>( 18027 d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, 
reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); 18028 } 18029 18030 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18031 template <typename Dispatch> bindAccelerationStructureMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,Dispatch const & d) const18032 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( 18033 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const 18034 { 18035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18036 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18037 VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" ); 18038 # endif 18039 18040 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindAccelerationStructureMemoryNV( 18041 m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) ); 18042 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); 18043 18044 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18045 } 18046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18047 18048 template <typename Dispatch> buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,VULKAN_HPP_NAMESPACE::Buffer instanceData,VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,VULKAN_HPP_NAMESPACE::Bool32 update,VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::Buffer scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,Dispatch const & d) const18049 VULKAN_HPP_INLINE void 
CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, 18050 VULKAN_HPP_NAMESPACE::Buffer instanceData, 18051 VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, 18052 VULKAN_HPP_NAMESPACE::Bool32 update, 18053 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18054 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18055 VULKAN_HPP_NAMESPACE::Buffer scratch, 18056 VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, 18057 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18058 { 18059 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18060 d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, 18061 reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), 18062 static_cast<VkBuffer>( instanceData ), 18063 static_cast<VkDeviceSize>( instanceOffset ), 18064 static_cast<VkBool32>( update ), 18065 static_cast<VkAccelerationStructureNV>( dst ), 18066 static_cast<VkAccelerationStructureNV>( src ), 18067 static_cast<VkBuffer>( scratch ), 18068 static_cast<VkDeviceSize>( scratchOffset ) ); 18069 } 18070 18071 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18072 template <typename Dispatch> buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,VULKAN_HPP_NAMESPACE::Buffer instanceData,VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,VULKAN_HPP_NAMESPACE::Bool32 update,VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::Buffer scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,Dispatch const & d) const18073 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, 18074 VULKAN_HPP_NAMESPACE::Buffer instanceData, 18075 VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, 18076 VULKAN_HPP_NAMESPACE::Bool32 update, 18077 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18078 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18079 
VULKAN_HPP_NAMESPACE::Buffer scratch, 18080 VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, 18081 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18082 { 18083 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18084 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18085 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18086 # endif 18087 18088 d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, 18089 reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), 18090 static_cast<VkBuffer>( instanceData ), 18091 static_cast<VkDeviceSize>( instanceOffset ), 18092 static_cast<VkBool32>( update ), 18093 static_cast<VkAccelerationStructureNV>( dst ), 18094 static_cast<VkAccelerationStructureNV>( src ), 18095 static_cast<VkBuffer>( scratch ), 18096 static_cast<VkDeviceSize>( scratchOffset ) ); 18097 } 18098 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18099 18100 template <typename Dispatch> copyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,Dispatch const & d) const18101 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18102 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18103 VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, 18104 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18105 { 18106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18107 d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, 18108 static_cast<VkAccelerationStructureNV>( dst ), 18109 static_cast<VkAccelerationStructureNV>( src ), 18110 static_cast<VkCopyAccelerationStructureModeKHR>( mode ) ); 18111 } 18112 18113 template <typename Dispatch> traceRaysNV(VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize 
raygenShaderBindingOffset,VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const18114 VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, 18115 VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, 18116 VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, 18117 VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, 18118 VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, 18119 VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, 18120 VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, 18121 VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, 18122 VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, 18123 VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, 18124 VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, 18125 uint32_t width, 18126 uint32_t height, 18127 uint32_t depth, 18128 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18129 { 18130 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18131 d.vkCmdTraceRaysNV( m_commandBuffer, 18132 static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), 18133 static_cast<VkDeviceSize>( raygenShaderBindingOffset ), 18134 static_cast<VkBuffer>( missShaderBindingTableBuffer ), 18135 static_cast<VkDeviceSize>( missShaderBindingOffset ), 18136 static_cast<VkDeviceSize>( missShaderBindingStride ), 18137 static_cast<VkBuffer>( hitShaderBindingTableBuffer 
), 18138 static_cast<VkDeviceSize>( hitShaderBindingOffset ), 18139 static_cast<VkDeviceSize>( hitShaderBindingStride ), 18140 static_cast<VkBuffer>( callableShaderBindingTableBuffer ), 18141 static_cast<VkDeviceSize>( callableShaderBindingOffset ), 18142 static_cast<VkDeviceSize>( callableShaderBindingStride ), 18143 width, 18144 height, 18145 depth ); 18146 } 18147 18148 template <typename Dispatch> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const18149 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18150 uint32_t createInfoCount, 18151 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, 18152 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18153 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 18154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18155 { 18156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18157 return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, 18158 static_cast<VkPipelineCache>( pipelineCache ), 18159 createInfoCount, 18160 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), 18161 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18162 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 18163 } 18164 18165 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18166 template <typename PipelineAllocator, typename Dispatch> 18167 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const 
& createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18168 Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18169 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18170 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18171 Dispatch const & d ) const 18172 { 18173 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18174 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18175 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18176 # endif 18177 18178 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 18179 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18180 m_device, 18181 static_cast<VkPipelineCache>( pipelineCache ), 18182 createInfos.size(), 18183 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18184 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18185 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18186 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18187 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", 18188 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18189 18190 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 18191 } 18192 18193 template <typename PipelineAllocator, 18194 typename Dispatch, 18195 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 18196 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const18197 Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18198 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18199 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18200 PipelineAllocator & pipelineAllocator, 18201 Dispatch const & d ) const 18202 { 18203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18205 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18206 # endif 18207 18208 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 18209 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18210 m_device, 18211 static_cast<VkPipelineCache>( pipelineCache ), 18212 createInfos.size(), 18213 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18214 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18215 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18216 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18217 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", 18218 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18219 18220 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, 
PipelineAllocator>>( result, std::move( pipelines ) ); 18221 } 18222 18223 template <typename Dispatch> 18224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18225 Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18226 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, 18227 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18228 Dispatch const & d ) const 18229 { 18230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18231 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18232 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18233 # endif 18234 18235 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 18236 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18237 m_device, 18238 static_cast<VkPipelineCache>( pipelineCache ), 18239 1, 18240 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), 18241 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18242 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 18243 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18244 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", 18245 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18246 18247 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 18248 } 18249 18250 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18251 template <typename Dispatch, typename PipelineAllocator> 18252 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18253 Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18254 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18255 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18256 Dispatch const & d ) const 18257 { 18258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18259 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18260 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18261 # endif 18262 18263 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 18264 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18265 m_device, 18266 static_cast<VkPipelineCache>( pipelineCache ), 18267 createInfos.size(), 18268 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18269 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18270 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18271 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18272 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", 18273 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18274 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 18275 
uniquePipelines.reserve( createInfos.size() ); 18276 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 18277 for ( auto const & pipeline : pipelines ) 18278 { 18279 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 18280 } 18281 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 18282 } 18283 18284 template < 18285 typename Dispatch, 18286 typename PipelineAllocator, 18287 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 18288 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const18289 Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18290 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18291 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18292 PipelineAllocator & pipelineAllocator, 18293 Dispatch const & d ) const 18294 { 18295 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18296 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18297 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18298 # endif 18299 18300 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 18301 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateRayTracingPipelinesNV( 18302 m_device, 18303 static_cast<VkPipelineCache>( pipelineCache ), 18304 createInfos.size(), 18305 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18306 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18307 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18308 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18309 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", 18310 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18311 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 18312 uniquePipelines.reserve( createInfos.size() ); 18313 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 18314 for ( auto const & pipeline : pipelines ) 18315 { 18316 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 18317 } 18318 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 18319 } 18320 18321 template <typename Dispatch> 18322 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18323 Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18324 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, 18325 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18326 Dispatch const & d ) const 18327 { 18328 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
18329 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18330 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18331 # endif 18332 18333 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 18334 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18335 m_device, 18336 static_cast<VkPipelineCache>( pipelineCache ), 18337 1, 18338 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), 18339 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18340 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 18341 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18342 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", 18343 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18344 18345 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 18346 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 18347 } 18348 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18349 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18350 18351 template <typename Dispatch> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const18352 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 18353 uint32_t firstGroup, 18354 uint32_t groupCount, 18355 size_t dataSize, 18356 void * pData, 18357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18358 { 18359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18360 return static_cast<Result>( 18361 d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( 
pipeline ), firstGroup, groupCount, dataSize, pData ) ); 18362 } 18363 18364 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18365 template <typename DataType, typename DataTypeAllocator, typename Dispatch> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const18366 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV( 18367 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 18368 { 18369 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18370 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18371 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && 18372 "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 18373 # endif 18374 18375 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 18376 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 18377 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 18378 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 18379 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); 18380 18381 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18382 } 18383 18384 template <typename DataType, typename Dispatch> 18385 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const18386 Device::getRayTracingShaderGroupHandleNV( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 18387 { 18388 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18389 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18390 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && 18391 "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 18392 # endif 18393 18394 DataType data; 18395 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 18396 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 18397 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); 18398 18399 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18400 } 18401 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18402 18403 template <typename Dispatch> getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,void * pData,Dispatch const & d) const18404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 18405 size_t dataSize, 18406 void * pData, 18407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18408 { 18409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18410 return static_cast<Result>( 18411 d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); 18412 } 18413 18414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18415 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 18416 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type 
getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,Dispatch const & d) const18417 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const 18418 { 18419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18420 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18421 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); 18422 # endif 18423 18424 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 18425 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 18426 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 18427 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 18428 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); 18429 18430 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18431 } 18432 18433 template <typename DataType, typename Dispatch> 18434 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Dispatch const & d) const18435 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const 18436 { 18437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18438 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18439 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); 18440 # endif 18441 
18442 DataType data; 18443 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 18444 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 18445 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); 18446 18447 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18448 } 18449 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18450 18451 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const18452 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, 18453 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, 18454 VULKAN_HPP_NAMESPACE::QueryType queryType, 18455 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 18456 uint32_t firstQuery, 18457 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18458 { 18459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18460 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, 18461 accelerationStructureCount, 18462 reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), 18463 static_cast<VkQueryType>( queryType ), 18464 static_cast<VkQueryPool>( queryPool ), 18465 firstQuery ); 18466 } 18467 18468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18469 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool 
queryPool,uint32_t firstQuery,Dispatch const & d) const18470 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( 18471 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, 18472 VULKAN_HPP_NAMESPACE::QueryType queryType, 18473 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 18474 uint32_t firstQuery, 18475 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18476 { 18477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18478 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18479 VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV && 18480 "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" ); 18481 # endif 18482 18483 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, 18484 accelerationStructures.size(), 18485 reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), 18486 static_cast<VkQueryType>( queryType ), 18487 static_cast<VkQueryPool>( queryPool ), 18488 firstQuery ); 18489 } 18490 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18491 18492 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18493 template <typename Dispatch> compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const18494 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 18495 uint32_t shader, 18496 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18497 { 18498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18499 return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); 18500 } 18501 #else 18502 template <typename Dispatch> 18503 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const18504 
Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const 18505 { 18506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18507 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18508 VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" ); 18509 # endif 18510 18511 VULKAN_HPP_NAMESPACE::Result result = 18512 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); 18513 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); 18514 18515 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18516 } 18517 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18518 18519 //=== VK_KHR_maintenance3 === 18520 18521 template <typename Dispatch> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const18522 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 18523 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 18524 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18525 { 18526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18527 d.vkGetDescriptorSetLayoutSupportKHR( 18528 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 18529 } 18530 18531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18532 template <typename Dispatch> 18533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const18534 Device::getDescriptorSetLayoutSupportKHR( 
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 18535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18536 { 18537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18539 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 18540 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 18541 # endif 18542 18543 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 18544 d.vkGetDescriptorSetLayoutSupportKHR( 18545 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 18546 18547 return support; 18548 } 18549 18550 template <typename X, typename Y, typename... Z, typename Dispatch> 18551 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const18552 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 18553 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18554 { 18555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18556 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18557 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 18558 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 18559 # endif 18560 18561 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18562 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 18563 d.vkGetDescriptorSetLayoutSupportKHR( 18564 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 18565 18566 return 
structureChain; 18567 } 18568 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18569 18570 //=== VK_KHR_draw_indirect_count === 18571 18572 template <typename Dispatch> drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18573 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 18574 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18575 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18576 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18577 uint32_t maxDrawCount, 18578 uint32_t stride, 18579 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18580 { 18581 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18582 d.vkCmdDrawIndirectCountKHR( m_commandBuffer, 18583 static_cast<VkBuffer>( buffer ), 18584 static_cast<VkDeviceSize>( offset ), 18585 static_cast<VkBuffer>( countBuffer ), 18586 static_cast<VkDeviceSize>( countBufferOffset ), 18587 maxDrawCount, 18588 stride ); 18589 } 18590 18591 template <typename Dispatch> drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18592 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 18593 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18594 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18595 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18596 uint32_t maxDrawCount, 18597 uint32_t stride, 18598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18599 { 18600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18601 d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, 18602 static_cast<VkBuffer>( buffer ), 18603 
static_cast<VkDeviceSize>( offset ), 18604 static_cast<VkBuffer>( countBuffer ), 18605 static_cast<VkDeviceSize>( countBufferOffset ), 18606 maxDrawCount, 18607 stride ); 18608 } 18609 18610 //=== VK_EXT_external_memory_host === 18611 18612 template <typename Dispatch> 18613 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,Dispatch const & d) const18614 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 18615 const void * pHostPointer, 18616 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, 18617 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18618 { 18619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18620 return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, 18621 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 18622 pHostPointer, 18623 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); 18624 } 18625 18626 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18627 template <typename Dispatch> 18628 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,Dispatch const & d) const18629 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 18630 const void * pHostPointer, 18631 Dispatch const & d ) const 18632 { 18633 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18634 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18635 VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function 
<vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" ); 18636 # endif 18637 18638 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; 18639 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18640 d.vkGetMemoryHostPointerPropertiesEXT( m_device, 18641 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 18642 pHostPointer, 18643 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) ); 18644 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); 18645 18646 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); 18647 } 18648 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18649 18650 //=== VK_AMD_buffer_marker === 18651 18652 template <typename Dispatch> writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const18653 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 18654 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 18655 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 18656 uint32_t marker, 18657 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18658 { 18659 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18660 d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, 18661 static_cast<VkPipelineStageFlagBits>( pipelineStage ), 18662 static_cast<VkBuffer>( dstBuffer ), 18663 static_cast<VkDeviceSize>( dstOffset ), 18664 marker ); 18665 } 18666 18667 //=== VK_EXT_calibrated_timestamps === 18668 18669 template <typename Dispatch> getCalibrateableTimeDomainsEXT(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const18670 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, 18671 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 18672 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18673 { 18674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18675 return static_cast<Result>( 18676 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 18677 } 18678 18679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18680 template <typename TimeDomainKHRAllocator, typename Dispatch> 18681 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const & d) const18682 PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const 18683 { 18684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18685 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18686 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 18687 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18688 # endif 18689 18690 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 18691 uint32_t timeDomainCount; 18692 VULKAN_HPP_NAMESPACE::Result result; 18693 do 18694 { 18695 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 18696 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 18697 { 18698 timeDomains.resize( timeDomainCount ); 18699 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18700 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 18701 } 18702 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18703 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 18704 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 18705 if ( timeDomainCount < timeDomains.size() ) 18706 { 18707 timeDomains.resize( timeDomainCount ); 18708 } 18709 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 18710 } 18711 18712 template <typename TimeDomainKHRAllocator, 18713 typename Dispatch, 18714 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 18715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const18716 PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 18717 { 18718 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18719 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18720 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 18721 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18722 # endif 18723 18724 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 18725 uint32_t timeDomainCount; 18726 VULKAN_HPP_NAMESPACE::Result result; 18727 do 18728 { 18729 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 18730 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 18731 { 18732 timeDomains.resize( timeDomainCount ); 18733 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 18734 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 18735 } 18736 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18737 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 18738 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 18739 if ( timeDomainCount < timeDomains.size() ) 18740 { 18741 timeDomains.resize( timeDomainCount ); 18742 } 18743 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 18744 } 18745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18746 18747 template <typename Dispatch> getCalibratedTimestampsEXT(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const18748 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, 18749 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 18750 uint64_t * pTimestamps, 18751 uint64_t * pMaxDeviation, 18752 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18753 { 18754 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18755 return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( 18756 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 18757 } 18758 18759 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18760 template <typename Uint64_tAllocator, typename Dispatch> 18761 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch 
const & d) const18762 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 18763 Dispatch const & d ) const 18764 { 18765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18767 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18768 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18769 # endif 18770 18771 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 18772 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 18773 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 18774 uint64_t & maxDeviation = data_.second; 18775 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 18776 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 18777 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 18778 18779 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18780 } 18781 18782 template <typename Uint64_tAllocator, 18783 typename Dispatch, 18784 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 18785 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const18786 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 18787 Uint64_tAllocator & uint64_tAllocator, 18788 Dispatch const & d ) const 18789 { 18790 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18791 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18792 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18793 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18794 # endif 18795 18796 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 18797 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 18798 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 18799 uint64_t & maxDeviation = data_.second; 18800 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 18801 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 18802 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 18803 18804 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18805 } 18806 18807 template <typename Dispatch> 18808 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const18809 Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 18810 { 18811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18812 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18813 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18814 "Function <vkGetCalibratedTimestampsEXT> requires 
<VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18815 # endif 18816 18817 std::pair<uint64_t, uint64_t> data_; 18818 uint64_t & timestamp = data_.first; 18819 uint64_t & maxDeviation = data_.second; 18820 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18821 d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 18822 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); 18823 18824 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18825 } 18826 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18827 18828 //=== VK_NV_mesh_shader === 18829 18830 template <typename Dispatch> drawMeshTasksNV(uint32_t taskCount,uint32_t firstTask,Dispatch const & d) const18831 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18832 { 18833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18834 d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); 18835 } 18836 18837 template <typename Dispatch> drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const18838 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 18839 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18840 uint32_t drawCount, 18841 uint32_t stride, 18842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18843 { 18844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18845 d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 18846 } 18847 18848 template <typename Dispatch> drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer 
buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18849 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 18850 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18851 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18852 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18853 uint32_t maxDrawCount, 18854 uint32_t stride, 18855 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18856 { 18857 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18858 d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, 18859 static_cast<VkBuffer>( buffer ), 18860 static_cast<VkDeviceSize>( offset ), 18861 static_cast<VkBuffer>( countBuffer ), 18862 static_cast<VkDeviceSize>( countBufferOffset ), 18863 maxDrawCount, 18864 stride ); 18865 } 18866 18867 //=== VK_NV_scissor_exclusive === 18868 18869 template <typename Dispatch> setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,Dispatch const & d) const18870 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 18871 uint32_t exclusiveScissorCount, 18872 const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, 18873 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18874 { 18875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18876 d.vkCmdSetExclusiveScissorEnableNV( 18877 m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); 18878 } 18879 18880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18881 template <typename Dispatch> 18882 VULKAN_HPP_INLINE void setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & 
exclusiveScissorEnables,Dispatch const & d) const18883 CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 18884 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables, 18885 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18886 { 18887 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18888 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18889 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" ); 18890 # endif 18891 18892 d.vkCmdSetExclusiveScissorEnableNV( 18893 m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) ); 18894 } 18895 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18896 18897 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,Dispatch const & d) const18898 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 18899 uint32_t exclusiveScissorCount, 18900 const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, 18901 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18902 { 18903 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18904 d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); 18905 } 18906 18907 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18908 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,Dispatch const & d) const18909 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 18910 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & 
exclusiveScissors, 18911 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18912 { 18913 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18914 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18915 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" ); 18916 # endif 18917 18918 d.vkCmdSetExclusiveScissorNV( 18919 m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) ); 18920 } 18921 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18922 18923 //=== VK_NV_device_diagnostic_checkpoints === 18924 18925 template <typename Dispatch> setCheckpointNV(const void * pCheckpointMarker,Dispatch const & d) const18926 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18927 { 18928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18929 d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); 18930 } 18931 18932 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18933 template <typename CheckpointMarkerType, typename Dispatch> setCheckpointNV(CheckpointMarkerType const & checkpointMarker,Dispatch const & d) const18934 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18935 { 18936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18937 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18938 VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18939 # endif 18940 18941 d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) ); 18942 } 18943 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18944 18945 template <typename Dispatch> getCheckpointDataNV(uint32_t * 
pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,Dispatch const & d) const18946 VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, 18947 VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, 18948 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18949 { 18950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18951 d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); 18952 } 18953 18954 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18955 template <typename CheckpointDataNVAllocator, typename Dispatch> 18956 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(Dispatch const & d) const18957 Queue::getCheckpointDataNV( Dispatch const & d ) const 18958 { 18959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18960 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18961 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18962 # endif 18963 18964 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData; 18965 uint32_t checkpointDataCount; 18966 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 18967 checkpointData.resize( checkpointDataCount ); 18968 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 18969 18970 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 18971 if ( checkpointDataCount < checkpointData.size() ) 18972 { 18973 checkpointData.resize( checkpointDataCount ); 18974 } 18975 return checkpointData; 18976 } 18977 18978 template <typename CheckpointDataNVAllocator, 18979 typename Dispatch, 18980 typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, 
VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type> 18981 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(CheckpointDataNVAllocator & checkpointDataNVAllocator,Dispatch const & d) const18982 Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const 18983 { 18984 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18985 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18986 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18987 # endif 18988 18989 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator ); 18990 uint32_t checkpointDataCount; 18991 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 18992 checkpointData.resize( checkpointDataCount ); 18993 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 18994 18995 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 18996 if ( checkpointDataCount < checkpointData.size() ) 18997 { 18998 checkpointData.resize( checkpointDataCount ); 18999 } 19000 return checkpointData; 19001 } 19002 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19003 19004 //=== VK_KHR_timeline_semaphore === 19005 19006 template <typename Dispatch> getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const19007 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 19008 uint64_t * pValue, 19009 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19010 { 19011 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19012 return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, 
static_cast<VkSemaphore>( semaphore ), pValue ) ); 19013 } 19014 19015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19016 template <typename Dispatch> 19017 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const19018 Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const 19019 { 19020 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19021 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19022 VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19023 # endif 19024 19025 uint64_t value; 19026 VULKAN_HPP_NAMESPACE::Result result = 19027 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 19028 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); 19029 19030 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 19031 } 19032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19033 19034 template <typename Dispatch> waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const19035 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 19036 uint64_t timeout, 19037 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19038 { 19039 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19040 return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 19041 } 19042 19043 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19044 template <typename Dispatch> 19045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const19046 Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 19047 { 19048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19050 VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19051 # endif 19052 19053 VULKAN_HPP_NAMESPACE::Result result = 19054 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 19055 VULKAN_HPP_NAMESPACE::detail::resultCheck( 19056 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 19057 19058 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 19059 } 19060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19061 19062 template <typename Dispatch> signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const19063 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 19064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19065 { 19066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19067 return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 19068 } 19069 19070 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19071 template <typename Dispatch> 19072 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const19073 
Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 19074 { 19075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19076 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19077 VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19078 # endif 19079 19080 VULKAN_HPP_NAMESPACE::Result result = 19081 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 19082 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); 19083 19084 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19085 } 19086 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19087 19088 //=== VK_INTEL_performance_query === 19089 19090 template <typename Dispatch> initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,Dispatch const & d) const19091 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( 19092 const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19093 { 19094 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19095 return static_cast<Result>( 19096 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); 19097 } 19098 19099 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19100 template <typename Dispatch> 19101 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo,Dispatch const & d) const19102 Device::initializePerformanceApiINTEL( const 
VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const 19103 { 19104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19105 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19106 VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" ); 19107 # endif 19108 19109 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19110 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) ); 19111 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); 19112 19113 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19114 } 19115 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19116 19117 template <typename Dispatch> uninitializePerformanceApiINTEL(Dispatch const & d) const19118 VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19119 { 19120 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19121 d.vkUninitializePerformanceApiINTEL( m_device ); 19122 } 19123 19124 template <typename Dispatch> setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const19125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, 19126 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19127 { 19128 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19129 return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); 19130 } 19131 19132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19133 template <typename Dispatch> 19134 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,Dispatch const & d) const19135 CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 19136 { 19137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19139 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" ); 19140 # endif 19141 19142 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19143 d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) ); 19144 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); 19145 19146 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19147 } 19148 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19149 19150 template <typename Dispatch> setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const19151 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( 19152 const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19153 { 19154 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19155 return static_cast<Result>( 19156 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); 19157 } 19158 19159 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19160 template <typename Dispatch> 19161 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType<void>::type setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo,Dispatch const & d) const19162 CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 19163 { 19164 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19165 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19166 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" ); 19167 # endif 19168 19169 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19170 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) ); 19171 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); 19172 19173 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19174 } 19175 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19176 19177 template <typename Dispatch> setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,Dispatch const & d) const19178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( 19179 const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19180 { 19181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19182 return static_cast<Result>( 19183 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); 19184 } 19185 19186 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19187 template <typename Dispatch> 19188 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,Dispatch const & d) const19189 CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const 19190 { 19191 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19192 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19193 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" ); 19194 # endif 19195 19196 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19197 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) ); 19198 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); 19199 19200 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19201 } 19202 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19203 19204 template <typename Dispatch> 19205 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,Dispatch const & d) const19206 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, 19207 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, 19208 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19209 { 19210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19211 return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, 19212 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), 19213 reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) ); 
19214 } 19215 19216 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19217 template <typename Dispatch> 19218 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const19219 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const 19220 { 19221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19223 VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 19224 # endif 19225 19226 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 19227 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19228 d.vkAcquirePerformanceConfigurationINTEL( m_device, 19229 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 19230 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 19231 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); 19232 19233 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( configuration ) ); 19234 } 19235 19236 # ifndef VULKAN_HPP_NO_SMART_HANDLE 19237 template <typename Dispatch> 19238 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const19239 Device::acquirePerformanceConfigurationINTELUnique( const 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, 19240 Dispatch const & d ) const 19241 { 19242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19243 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19244 VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 19245 # endif 19246 19247 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 19248 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19249 d.vkAcquirePerformanceConfigurationINTEL( m_device, 19250 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 19251 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 19252 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); 19253 19254 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 19255 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) ); 19256 } 19257 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 19258 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19259 19260 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19261 template <typename Dispatch> releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19262 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 19263 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19264 { 19265 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19266 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19267 } 19268 #else 19269 template 
<typename Dispatch> 19270 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19271 Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 19272 { 19273 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19274 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19275 VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 19276 # endif 19277 19278 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19279 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19280 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); 19281 19282 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19283 } 19284 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19285 19286 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19287 template <typename Dispatch> release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19288 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 19289 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19290 { 19291 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19292 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19293 } 19294 #else 19295 template <typename Dispatch> 19296 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19297 Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 19298 { 19299 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19300 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19301 VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 19302 # endif 19303 19304 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19305 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19306 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); 19307 19308 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19309 } 19310 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19311 19312 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19313 template <typename Dispatch> setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19314 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 19315 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19316 { 19317 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19318 return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19319 } 19320 #else 19321 template <typename Dispatch> 19322 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const19323 Queue::setPerformanceConfigurationINTEL( 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 19324 { 19325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19326 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19327 VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL && 19328 "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 19329 # endif 19330 19331 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19332 d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 19333 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); 19334 19335 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19336 } 19337 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19338 19339 template <typename Dispatch> getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,Dispatch const & d) const19340 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, 19341 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, 19342 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19343 { 19344 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19345 return static_cast<Result>( d.vkGetPerformanceParameterINTEL( 19346 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); 19347 } 19348 19349 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19350 template <typename Dispatch> 19351 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,Dispatch const & d) const19352 
Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const 19353 { 19354 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19355 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19356 VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" ); 19357 # endif 19358 19359 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; 19360 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPerformanceParameterINTEL( 19361 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) ); 19362 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); 19363 19364 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 19365 } 19366 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19367 19368 //=== VK_AMD_display_native_hdr === 19369 19370 template <typename Dispatch> setLocalDimmingAMD(VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,Dispatch const & d) const19371 VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, 19372 VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, 19373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19374 { 19375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19376 d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); 19377 } 19378 19379 #if defined( VK_USE_PLATFORM_FUCHSIA ) 19380 //=== VK_FUCHSIA_imagepipe_surface === 19381 19382 template <typename Dispatch> 19383 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const19384 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, 19385 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 19386 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 19387 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19388 { 19389 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19390 return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, 19391 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), 19392 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 19393 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 19394 } 19395 19396 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19397 template <typename Dispatch> 19398 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19399 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 19400 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19401 Dispatch const & d ) const 19402 { 19403 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19404 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19405 VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); 19406 # endif 19407 19408 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19409 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 19410 m_instance, 19411 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo 
), 19412 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19413 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19414 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); 19415 19416 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 19417 } 19418 19419 # ifndef VULKAN_HPP_NO_SMART_HANDLE 19420 template <typename Dispatch> 19421 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19422 Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 19423 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19424 Dispatch const & d ) const 19425 { 19426 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19427 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19428 VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); 19429 # endif 19430 19431 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19432 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 19433 m_instance, 19434 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), 19435 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19436 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19437 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); 
19438 19439 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 19440 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 19441 } 19442 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 19443 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19444 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 19445 19446 #if defined( VK_USE_PLATFORM_METAL_EXT ) 19447 //=== VK_EXT_metal_surface === 19448 19449 template <typename Dispatch> createMetalSurfaceEXT(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const19450 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, 19451 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 19452 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 19453 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19454 { 19455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19456 return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, 19457 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), 19458 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 19459 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 19460 } 19461 19462 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19463 template <typename Dispatch> 19464 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19465 Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 19466 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19467 Dispatch const & d ) const 
19468 { 19469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19470 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19471 VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); 19472 # endif 19473 19474 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19475 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19476 d.vkCreateMetalSurfaceEXT( m_instance, 19477 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 19478 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19479 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19480 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); 19481 19482 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 19483 } 19484 19485 # ifndef VULKAN_HPP_NO_SMART_HANDLE 19486 template <typename Dispatch> 19487 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19488 Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 19489 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19490 Dispatch const & d ) const 19491 { 19492 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19493 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19494 VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); 19495 # endif 19496 19497 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19498 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 19499 d.vkCreateMetalSurfaceEXT( m_instance, 19500 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 19501 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19502 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19503 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); 19504 19505 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 19506 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 19507 } 19508 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 19509 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19510 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 19511 19512 //=== VK_KHR_fragment_shading_rate === 19513 19514 template <typename Dispatch> 19515 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getFragmentShadingRatesKHR(uint32_t * pFragmentShadingRateCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,Dispatch const & d) const19516 PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, 19517 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, 19518 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19519 { 19520 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19521 return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 19522 m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); 19523 } 19524 19525 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19526 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch> 19527 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19528 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, 
PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(Dispatch const & d) const19529 PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const 19530 { 19531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19533 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && 19534 "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); 19535 # endif 19536 19537 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates; 19538 uint32_t fragmentShadingRateCount; 19539 VULKAN_HPP_NAMESPACE::Result result; 19540 do 19541 { 19542 result = 19543 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 19544 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 19545 { 19546 fragmentShadingRates.resize( fragmentShadingRateCount ); 19547 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 19548 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 19549 } 19550 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19551 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 19552 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 19553 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 19554 { 19555 fragmentShadingRates.resize( fragmentShadingRateCount ); 19556 } 19557 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); 19558 } 19559 19560 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, 19561 
typename Dispatch, 19562 typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type, 19563 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, 19564 int>::type> 19565 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19566 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,Dispatch const & d) const19567 PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, 19568 Dispatch const & d ) const 19569 { 19570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19571 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19572 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && 19573 "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); 19574 # endif 19575 19576 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( 19577 physicalDeviceFragmentShadingRateKHRAllocator ); 19578 uint32_t fragmentShadingRateCount; 19579 VULKAN_HPP_NAMESPACE::Result result; 19580 do 19581 { 19582 result = 19583 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 19584 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 19585 { 19586 fragmentShadingRates.resize( fragmentShadingRateCount ); 19587 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 19588 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 19589 } 19590 } while ( result 
== VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19591 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 19592 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 19593 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 19594 { 19595 fragmentShadingRates.resize( fragmentShadingRateCount ); 19596 } 19597 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); 19598 } 19599 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19600 19601 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const19602 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, 19603 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 19604 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19605 { 19606 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19607 d.vkCmdSetFragmentShadingRateKHR( 19608 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 19609 } 19610 19611 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19612 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const19613 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, 19614 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 19615 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19616 { 19617 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19618 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19619 VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" ); 19620 # endif 19621 19622 d.vkCmdSetFragmentShadingRateKHR( 19623 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 19624 } 19625 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19626 19627 //=== VK_KHR_dynamic_rendering_local_read === 19628 19629 template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo,Dispatch const & d) const19630 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, 19631 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19632 { 19633 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19634 d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) ); 19635 } 19636 19637 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19638 template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo,Dispatch const & d) const19639 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, 19640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19641 { 19642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19644 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && 19645 "Function <vkCmdSetRenderingAttachmentLocationsKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 19646 # endif 19647 19648 d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, 
reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) ); 19649 } 19650 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19651 19652 template <typename Dispatch> 19653 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo,Dispatch const & d) const19654 CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, 19655 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19656 { 19657 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19658 d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) ); 19659 } 19660 19661 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19662 template <typename Dispatch> 19663 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo,Dispatch const & d) const19664 CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, 19665 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19666 { 19667 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19668 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19669 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && 19670 "Function <vkCmdSetRenderingInputAttachmentIndicesKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 19671 # endif 19672 19673 d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) ); 19674 } 19675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19676 19677 //=== VK_EXT_buffer_device_address === 19678 19679 template <typename Dispatch> getBufferAddressEXT(const 
VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const19680 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 19681 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19682 { 19683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19684 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 19685 } 19686 19687 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19688 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const19689 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 19690 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19691 { 19692 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19694 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && 19695 "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 19696 # endif 19697 19698 VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 19699 19700 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 19701 } 19702 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19703 19704 //=== VK_EXT_tooling_info === 19705 19706 template <typename Dispatch> getToolPropertiesEXT(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const19707 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, 19708 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 19709 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19710 { 19711 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19712 return static_cast<Result>( 19713 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 19714 } 19715 19716 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19717 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 19718 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19719 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(Dispatch const & d) const19720 PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const 19721 { 19722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19723 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19724 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 19725 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 19726 # endif 19727 19728 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 19729 uint32_t toolCount; 19730 VULKAN_HPP_NAMESPACE::Result result; 19731 do 19732 { 19733 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 19734 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 19735 { 19736 toolProperties.resize( toolCount ); 19737 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19738 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 19739 } 19740 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19741 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 19742 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 
19743 if ( toolCount < toolProperties.size() ) 19744 { 19745 toolProperties.resize( toolCount ); 19746 } 19747 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 19748 } 19749 19750 template < 19751 typename PhysicalDeviceToolPropertiesAllocator, 19752 typename Dispatch, 19753 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 19754 int>::type> 19755 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19756 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const19757 PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 19758 { 19759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19761 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 19762 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 19763 # endif 19764 19765 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 19766 physicalDeviceToolPropertiesAllocator ); 19767 uint32_t toolCount; 19768 VULKAN_HPP_NAMESPACE::Result result; 19769 do 19770 { 19771 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 19772 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 19773 { 19774 toolProperties.resize( toolCount ); 19775 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19776 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, 
reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 19777 } 19778 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19779 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 19780 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 19781 if ( toolCount < toolProperties.size() ) 19782 { 19783 toolProperties.resize( toolCount ); 19784 } 19785 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 19786 } 19787 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19788 19789 //=== VK_KHR_present_wait === 19790 19791 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19792 template <typename Dispatch> waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const19793 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 19794 uint64_t presentId, 19795 uint64_t timeout, 19796 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19797 { 19798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19799 return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 19800 } 19801 #else 19802 template <typename Dispatch> 19803 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const19804 Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const 19805 { 19806 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19807 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19808 VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" ); 19809 # endif 19810 19811 
VULKAN_HPP_NAMESPACE::Result result = 19812 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 19813 VULKAN_HPP_NAMESPACE::detail::resultCheck( 19814 result, 19815 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", 19816 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 19817 19818 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 19819 } 19820 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19821 19822 //=== VK_NV_cooperative_matrix === 19823 19824 template <typename Dispatch> getCooperativeMatrixPropertiesNV(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,Dispatch const & d) const19825 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( 19826 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19827 { 19828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19829 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19830 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); 19831 } 19832 19833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19834 template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch> 19835 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19836 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const & d) const19837 PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const 19838 { 19839 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19840 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19841 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 19842 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); 19843 # endif 19844 19845 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties; 19846 uint32_t propertyCount; 19847 VULKAN_HPP_NAMESPACE::Result result; 19848 do 19849 { 19850 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 19851 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 19852 { 19853 properties.resize( propertyCount ); 19854 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19855 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 19856 } 19857 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19858 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 19859 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 19860 if ( propertyCount < properties.size() ) 19861 { 19862 properties.resize( propertyCount ); 19863 } 19864 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 19865 } 19866 19867 template <typename CooperativeMatrixPropertiesNVAllocator, 19868 typename Dispatch, 19869 typename std::enable_if< 19870 std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, 19871 int>::type> 19872 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19873 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(CooperativeMatrixPropertiesNVAllocator & 
cooperativeMatrixPropertiesNVAllocator,Dispatch const & d) const19874 PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, 19875 Dispatch const & d ) const 19876 { 19877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19879 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 19880 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); 19881 # endif 19882 19883 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( 19884 cooperativeMatrixPropertiesNVAllocator ); 19885 uint32_t propertyCount; 19886 VULKAN_HPP_NAMESPACE::Result result; 19887 do 19888 { 19889 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 19890 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 19891 { 19892 properties.resize( propertyCount ); 19893 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19894 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 19895 } 19896 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19897 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 19898 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 19899 if ( propertyCount < properties.size() ) 19900 { 19901 properties.resize( propertyCount ); 19902 } 19903 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 19904 } 19905 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19906 19907 //=== VK_NV_coverage_reduction_mode === 19908 19909 template <typename Dispatch> 
getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t * pCombinationCount,VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,Dispatch const & d) const19910 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 19911 uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19912 { 19913 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19914 return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19915 m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); 19916 } 19917 19918 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19919 template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch> 19920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19921 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const & d) const19922 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const 19923 { 19924 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19925 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19926 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 19927 "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 19928 # endif 19929 19930 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations; 19931 uint32_t combinationCount; 19932 VULKAN_HPP_NAMESPACE::Result result; 19933 do 19934 { 19935 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19936 
d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 19937 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 19938 { 19939 combinations.resize( combinationCount ); 19940 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19941 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 19942 } 19943 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19944 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 19945 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 19946 if ( combinationCount < combinations.size() ) 19947 { 19948 combinations.resize( combinationCount ); 19949 } 19950 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 19951 } 19952 19953 template <typename FramebufferMixedSamplesCombinationNVAllocator, 19954 typename Dispatch, 19955 typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, 19956 VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, 19957 int>::type> 19958 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19959 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,Dispatch const & d) const19960 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 19961 FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const 19962 { 19963 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 19964 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19965 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 19966 "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 19967 # endif 19968 19969 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( 19970 framebufferMixedSamplesCombinationNVAllocator ); 19971 uint32_t combinationCount; 19972 VULKAN_HPP_NAMESPACE::Result result; 19973 do 19974 { 19975 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19976 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 19977 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 19978 { 19979 combinations.resize( combinationCount ); 19980 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19981 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 19982 } 19983 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19984 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 19985 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 19986 if ( combinationCount < combinations.size() ) 19987 { 19988 combinations.resize( combinationCount ); 19989 } 19990 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 19991 } 19992 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19993 19994 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 19995 //=== VK_EXT_full_screen_exclusive === 19996 19997 template <typename Dispatch> 19998 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfacePresentModes2EXT(const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const19999 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 20000 uint32_t * pPresentModeCount, 20001 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 20002 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20003 { 20004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20005 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 20006 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 20007 pPresentModeCount, 20008 reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 20009 } 20010 20011 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20012 template <typename PresentModeKHRAllocator, typename Dispatch> 20013 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const20014 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 20015 { 20016 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20017 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20018 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && 20019 "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 20020 # endif 20021 20022 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 20023 uint32_t presentModeCount; 20024 VULKAN_HPP_NAMESPACE::Result result; 20025 do 20026 { 20027 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 20028 
m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 20029 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 20030 { 20031 presentModes.resize( presentModeCount ); 20032 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20033 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 20034 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 20035 &presentModeCount, 20036 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 20037 } 20038 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20039 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 20040 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 20041 if ( presentModeCount < presentModes.size() ) 20042 { 20043 presentModes.resize( presentModeCount ); 20044 } 20045 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 20046 } 20047 20048 template <typename PresentModeKHRAllocator, 20049 typename Dispatch, 20050 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 20051 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const20052 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 20053 PresentModeKHRAllocator & presentModeKHRAllocator, 20054 Dispatch const & d ) const 20055 { 20056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20057 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20058 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
#    endif

    // Vector is constructed with the caller-supplied allocator so the result uses it.
    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t                     presentModeCount;
    VULKAN_HPP_NAMESPACE::Result result;
    // Standard Vulkan two-call enumeration: query the count, size the buffer, fetch the data.
    // Loop while VK_INCOMPLETE is returned, since the count may change between the two calls.
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &presentModeCount,
                                                        reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    // Shrink in case fewer modes were written on the final iteration than were allocated for.
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Plain wrapper: forwards to vkAcquireFullScreenExclusiveModeEXT and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#  else
  // Enhanced wrapper: checks the Result via resultCheck (throws on failure unless exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
#    endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Plain wrapper: forwards to vkReleaseFullScreenExclusiveModeEXT and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#  else
  // Enhanced wrapper: checks the Result via resultCheck (throws on failure unless exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
#    endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Plain pointer-based wrapper around vkGetDeviceGroupSurfacePresentModes2EXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *      pModes,
                                             Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the flags by value, checking the Result on the way out.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT &&
                       "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
#    endif

    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    VULKAN_HPP_NAMESPACE::Result                         result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_headless_surface ===

  // Plain pointer-based wrapper around vkCreateHeadlessSurfaceEXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
                                                              reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the created SurfaceKHR by value; allocator is optional.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                        Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like createHeadlessSurfaceEXT, but wraps the surface in a UniqueHandle that destroys it on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                              Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" );
#    endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_buffer_device_address ===

  // Plain pointer-based wrapper around vkGetBufferDeviceAddressKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: takes the info struct by reference; no Result to check for this entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR &&
                       "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
#  endif

    VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );

    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain pointer-based wrapper around vkGetBufferOpaqueCaptureAddressKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: takes the info struct by reference; no Result to check for this entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR &&
                       "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
#  endif

    uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain pointer-based wrapper around vkGetDeviceMemoryOpaqueCaptureAddressKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: takes the info struct by reference; no Result to check for this entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
                       "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
#  endif

    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_line_rasterization ===

  // Records the dynamic line-stipple state into the command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }

  //=== VK_EXT_host_query_reset ===

  // Host-side reset of a range of queries in a query pool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t                        firstQuery,
                                                    uint32_t                        queryCount,
                                                    Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  //=== VK_EXT_extended_dynamic_state ===

  // The following setters each record one piece of dynamic pipeline state into the command buffer.

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }

  // Plain pointer-based variant: caller supplies count and array.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t                               viewportCount,
                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                                 Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: count is taken from the proxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT &&
                       "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
#  endif

    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain pointer-based variant: caller supplies count and array.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: count is taken from the proxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT &&
                       "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
#  endif

    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain pointer-based variant: caller supplies count and parallel arrays (pSizes/pStrides may be null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t                                 firstBinding,
                                                               uint32_t                                 bindingCount,
                                                               const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                               Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  bindingCount,
                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: validates that the parallel arrays agree in size (sizes/strides may be empty),
  // asserting under VULKAN_HPP_NO_EXCEPTIONS and throwing LogicError otherwise.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t                                                                         firstBinding,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT &&
                       "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
#  endif
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }

  // Records the dynamic stencil-op state for the faces selected by faceMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        failOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        passOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
                                                         VULKAN_HPP_NAMESPACE::CompareOp        compareOp,
                                                         Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilOpEXT( m_commandBuffer,
                            static_cast<VkStencilFaceFlags>( faceMask ),
                            static_cast<VkStencilOp>( failOp ),
                            static_cast<VkStencilOp>( passOp ),
                            static_cast<VkStencilOp>( depthFailOp ),
                            static_cast<VkCompareOp>( compareOp ) );
  }

  //=== VK_KHR_deferred_host_operations ===

  // Plain pointer-based wrapper around vkCreateDeferredOperationKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::DeferredOperationKHR *      pDeferredOperation,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDeferredOperationKHR(
      m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the created DeferredOperationKHR by value; allocator is optional.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
    Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    VULKAN_HPP_NAMESPACE::Result               result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR(
      m_device,
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like createDeferredOperationKHR, but wraps the handle in a UniqueHandle that destroys it on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
    Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#    endif

    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    VULKAN_HPP_NAMESPACE::Result               result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR(
      m_device,
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain pointer-based destroy wrapper.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy wrapper with an optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                operation,
                                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy overload for DeferredOperationKHR (pointer-based allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for DeferredOperationKHR (optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                operation,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Returns the maximum number of threads that can usefully join the deferred operation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  }
20640 20641 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20642 template <typename Dispatch> getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20643 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20644 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20645 { 20646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20647 return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20648 } 20649 #else 20650 template <typename Dispatch> 20651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20652 Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20653 { 20654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20655 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20656 VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" ); 20657 # endif 20658 20659 VULKAN_HPP_NAMESPACE::Result result = 20660 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20661 20662 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 20663 } 20664 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20665 20666 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20667 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20668 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20669 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 20670 { 20671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20672 return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20673 } 20674 #else 20675 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20677 Dispatch const & d ) const 20678 { 20679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20680 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20681 VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" ); 20682 # endif 20683 20684 VULKAN_HPP_NAMESPACE::Result result = 20685 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20686 VULKAN_HPP_NAMESPACE::detail::resultCheck( 20687 result, 20688 VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", 20689 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); 20690 20691 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 20692 } 20693 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20694 20695 //=== VK_KHR_pipeline_executable_properties === 20696 20697 template <typename Dispatch> getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,Dispatch const & d) const20698 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, 20699 uint32_t * pExecutableCount, 20700 
VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, 20701 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20702 { 20703 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20704 return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, 20705 reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), 20706 pExecutableCount, 20707 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) ); 20708 } 20709 20710 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20711 template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch> 20712 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20713 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,Dispatch const & d) const20714 Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const 20715 { 20716 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20717 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20718 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && 20719 "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" ); 20720 # endif 20721 20722 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties; 20723 uint32_t executableCount; 20724 VULKAN_HPP_NAMESPACE::Result result; 20725 do 20726 { 20727 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20728 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 20729 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 20730 { 20731 properties.resize( executableCount ); 20732 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 20733 d.vkGetPipelineExecutablePropertiesKHR( m_device, 20734 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 20735 &executableCount, 20736 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 20737 } 20738 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20739 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); 20740 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 20741 if ( executableCount < properties.size() ) 20742 { 20743 properties.resize( executableCount ); 20744 } 20745 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 20746 } 20747 20748 template <typename PipelineExecutablePropertiesKHRAllocator, 20749 typename Dispatch, 20750 typename std::enable_if< 20751 std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, 20752 int>::type> 20753 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20754 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,Dispatch const & d) const20755 Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, 20756 PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, 20757 Dispatch const & d ) const 20758 { 20759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20761 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && 20762 "Function <vkGetPipelineExecutablePropertiesKHR> requires 
<VK_KHR_pipeline_executable_properties>" ); 20763 # endif 20764 20765 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( 20766 pipelineExecutablePropertiesKHRAllocator ); 20767 uint32_t executableCount; 20768 VULKAN_HPP_NAMESPACE::Result result; 20769 do 20770 { 20771 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20772 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 20773 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 20774 { 20775 properties.resize( executableCount ); 20776 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20777 d.vkGetPipelineExecutablePropertiesKHR( m_device, 20778 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 20779 &executableCount, 20780 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 20781 } 20782 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20783 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); 20784 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 20785 if ( executableCount < properties.size() ) 20786 { 20787 properties.resize( executableCount ); 20788 } 20789 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 20790 } 20791 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20792 20793 template <typename Dispatch> 20794 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pStatisticCount,VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,Dispatch const & d) const20795 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 20796 uint32_t * pStatisticCount, 20797 
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, 20798 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20799 { 20800 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20801 return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, 20802 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 20803 pStatisticCount, 20804 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) ); 20805 } 20806 20807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20808 template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch> 20809 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20810 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const20811 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 20812 { 20813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20815 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && 20816 "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 20817 # endif 20818 20819 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics; 20820 uint32_t statisticCount; 20821 VULKAN_HPP_NAMESPACE::Result result; 20822 do 20823 { 20824 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR( 20825 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 20826 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 20827 { 20828 statistics.resize( statisticCount ); 20829 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 20830 d.vkGetPipelineExecutableStatisticsKHR( m_device, 20831 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 20832 &statisticCount, 20833 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 20834 } 20835 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20836 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 20837 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 20838 if ( statisticCount < statistics.size() ) 20839 { 20840 statistics.resize( statisticCount ); 20841 } 20842 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); 20843 } 20844 20845 template <typename PipelineExecutableStatisticKHRAllocator, 20846 typename Dispatch, 20847 typename std::enable_if< 20848 std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, 20849 int>::type> 20850 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20851 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,Dispatch const & d) const20852 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 20853 PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, 20854 Dispatch const & d ) const 20855 { 20856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20857 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20858 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && 20859 "Function <vkGetPipelineExecutableStatisticsKHR> requires 
<VK_KHR_pipeline_executable_properties>" ); 20860 # endif 20861 20862 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( 20863 pipelineExecutableStatisticKHRAllocator ); 20864 uint32_t statisticCount; 20865 VULKAN_HPP_NAMESPACE::Result result; 20866 do 20867 { 20868 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR( 20869 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 20870 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 20871 { 20872 statistics.resize( statisticCount ); 20873 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20874 d.vkGetPipelineExecutableStatisticsKHR( m_device, 20875 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 20876 &statisticCount, 20877 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 20878 } 20879 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20880 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 20881 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 20882 if ( statisticCount < statistics.size() ) 20883 { 20884 statistics.resize( statisticCount ); 20885 } 20886 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); 20887 } 20888 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20889 20890 template <typename Dispatch> 20891 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pInternalRepresentationCount,VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,Dispatch const & d) const20892 Device::getPipelineExecutableInternalRepresentationsKHR( const 
VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 20893 uint32_t * pInternalRepresentationCount, 20894 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, 20895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20896 { 20897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20898 return static_cast<Result>( 20899 d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, 20900 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 20901 pInternalRepresentationCount, 20902 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) ); 20903 } 20904 20905 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20906 template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch> 20907 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 20908 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const20909 Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 20910 { 20911 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20912 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20913 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && 20914 "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 20915 # endif 20916 20917 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 20918 internalRepresentations; 20919 uint32_t internalRepresentationCount; 20920 VULKAN_HPP_NAMESPACE::Result result; 20921 do 20922 { 20923 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 20924 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 20925 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 20926 { 20927 internalRepresentations.resize( internalRepresentationCount ); 20928 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 20929 m_device, 20930 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 20931 &internalRepresentationCount, 20932 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); 20933 } 20934 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20935 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 20936 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 20937 if ( internalRepresentationCount < internalRepresentations.size() ) 20938 { 20939 internalRepresentations.resize( internalRepresentationCount ); 20940 } 20941 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); 20942 } 20943 20944 template <typename PipelineExecutableInternalRepresentationKHRAllocator, 20945 typename Dispatch, 20946 typename std::enable_if<std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type, 20947 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, 20948 int>::type> 20949 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 20950 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const 
VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,Dispatch const & d) const20951 Device::getPipelineExecutableInternalRepresentationsKHR( 20952 const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 20953 PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, 20954 Dispatch const & d ) const 20955 { 20956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20957 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20958 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && 20959 "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 20960 # endif 20961 20962 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 20963 internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); 20964 uint32_t internalRepresentationCount; 20965 VULKAN_HPP_NAMESPACE::Result result; 20966 do 20967 { 20968 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 20969 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 20970 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 20971 { 20972 internalRepresentations.resize( internalRepresentationCount ); 20973 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 20974 m_device, 20975 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 20976 &internalRepresentationCount, 20977 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); 20978 } 20979 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20980 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 20981 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 20982 if ( internalRepresentationCount < internalRepresentations.size() ) 20983 { 20984 internalRepresentations.resize( internalRepresentationCount ); 20985 } 20986 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); 20987 } 20988 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20989 20990 //=== VK_EXT_host_image_copy === 20991 20992 template <typename Dispatch> copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo,Dispatch const & d) const20993 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, 20994 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20995 { 20996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20997 return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) ); 20998 } 20999 21000 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21001 template <typename Dispatch> 21002 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo,Dispatch const & d) const21003 Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const 21004 { 21005 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21006 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21007 VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" 
); 21008 # endif 21009 21010 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21011 d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( ©MemoryToImageInfo ) ) ); 21012 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); 21013 21014 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21015 } 21016 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21017 21018 template <typename Dispatch> copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo,Dispatch const & d) const21019 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, 21020 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21021 { 21022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21023 return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) ); 21024 } 21025 21026 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21027 template <typename Dispatch> 21028 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo,Dispatch const & d) const21029 Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const 21030 { 21031 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21032 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21033 VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21034 # endif 21035 21036 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21037 d.vkCopyImageToMemoryEXT( m_device, 
reinterpret_cast<const VkCopyImageToMemoryInfo *>( ©ImageToMemoryInfo ) ) ); 21038 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); 21039 21040 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21041 } 21042 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21043 21044 template <typename Dispatch> copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo,Dispatch const & d) const21045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, 21046 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21047 { 21048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21049 return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) ); 21050 } 21051 21052 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21053 template <typename Dispatch> 21054 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo,Dispatch const & d) const21055 Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const 21056 { 21057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21059 VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21060 # endif 21061 21062 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21063 d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( ©ImageToImageInfo ) ) ); 21064 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::copyImageToImageEXT" ); 21065 21066 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21067 } 21068 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21069 21070 template <typename Dispatch> transitionImageLayoutEXT(uint32_t transitionCount,const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions,Dispatch const & d) const21071 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, 21072 const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, 21073 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21074 { 21075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21076 return static_cast<Result>( 21077 d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) ); 21078 } 21079 21080 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21081 template <typename Dispatch> 21082 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type transitionImageLayoutEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions,Dispatch const & d) const21083 Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions, 21084 Dispatch const & d ) const 21085 { 21086 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21087 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21088 VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21089 # endif 21090 21091 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21092 d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) ); 21093 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); 21094 21095 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21096 } 21097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21098 21099 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const21100 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, 21101 const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, 21102 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 21103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21104 { 21105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21106 d.vkGetImageSubresourceLayout2EXT( m_device, 21107 static_cast<VkImage>( image ), 21108 reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), 21109 reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 21110 } 21111 21112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21113 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const21114 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2EXT( 21115 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21116 { 21117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21118 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21119 VULKAN_HPP_ASSERT( 21120 d.vkGetImageSubresourceLayout2EXT && 21121 "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 21122 # endif 21123 
21124 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 21125 d.vkGetImageSubresourceLayout2EXT( m_device, 21126 static_cast<VkImage>( image ), 21127 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 21128 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 21129 21130 return layout; 21131 } 21132 21133 template <typename X, typename Y, typename... Z, typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const21134 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT( 21135 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21136 { 21137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21139 VULKAN_HPP_ASSERT( 21140 d.vkGetImageSubresourceLayout2EXT && 21141 "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 21142 # endif 21143 21144 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21145 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 21146 d.vkGetImageSubresourceLayout2EXT( m_device, 21147 static_cast<VkImage>( image ), 21148 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 21149 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 21150 21151 return structureChain; 21152 } 21153 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21154 21155 //=== VK_KHR_map_memory2 === 21156 21157 template <typename Dispatch> mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo,void ** ppData,Dispatch const & d) const21158 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( 
const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, 21159 void ** ppData, 21160 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21161 { 21162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21163 return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); 21164 } 21165 21166 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21167 template <typename Dispatch> 21168 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo,Dispatch const & d) const21169 Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d ) const 21170 { 21171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21172 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21173 VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 21174 # endif 21175 21176 void * pData; 21177 VULKAN_HPP_NAMESPACE::Result result = 21178 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); 21179 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); 21180 21181 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 21182 } 21183 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21184 21185 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo,Dispatch const & d) const21186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, 21187 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21188 { 21189 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21190 return static_cast<Result>( d.vkUnmapMemory2KHR( 
m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); 21191 } 21192 21193 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21194 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo,Dispatch const & d) const21195 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, 21196 Dispatch const & d ) const 21197 { 21198 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21199 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21200 VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 21201 # endif 21202 21203 VULKAN_HPP_NAMESPACE::Result result = 21204 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); 21205 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); 21206 21207 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21208 } 21209 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21210 21211 //=== VK_EXT_swapchain_maintenance1 === 21212 21213 template <typename Dispatch> releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,Dispatch const & d) const21214 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, 21215 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21216 { 21217 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21218 return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); 21219 } 21220 21221 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21222 template <typename Dispatch> 21223 VULKAN_HPP_INLINE typename 
ResultValueType<void>::type releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo,Dispatch const & d) const21224 Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const 21225 { 21226 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21227 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21228 VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" ); 21229 # endif 21230 21231 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21232 d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) ); 21233 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); 21234 21235 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21236 } 21237 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21238 21239 //=== VK_NV_device_generated_commands === 21240 21241 template <typename Dispatch> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const21242 VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, 21243 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 21244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21245 { 21246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21247 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 21248 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), 21249 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 21250 } 21251 21252 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 21253 template <typename Dispatch> 21254 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const21255 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 21256 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21257 { 21258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21259 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21260 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 21261 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 21262 # endif 21263 21264 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 21265 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 21266 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 21267 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21268 21269 return memoryRequirements; 21270 } 21271 21272 template <typename X, typename Y, typename... 
Z, typename Dispatch> 21273 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const21274 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 21275 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21276 { 21277 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21278 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21279 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 21280 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 21281 # endif 21282 21283 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21284 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 21285 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 21286 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 21287 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21288 21289 return structureChain; 21290 } 21291 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21292 21293 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const21294 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 21295 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21296 { 21297 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21298 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 21299 } 21300 21301 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21302 template 
<typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const21303 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 21304 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21305 { 21306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21307 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21308 VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 21309 # endif 21310 21311 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 21312 } 21313 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21314 21315 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const21316 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 21317 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 21318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21319 { 21320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21321 d.vkCmdExecuteGeneratedCommandsNV( 21322 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 21323 } 21324 21325 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21326 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const21327 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 21328 
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 21329 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21330 { 21331 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21332 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21333 VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 21334 # endif 21335 21336 d.vkCmdExecuteGeneratedCommandsNV( 21337 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 21338 } 21339 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21340 21341 template <typename Dispatch> bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t groupIndex,Dispatch const & d) const21342 VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 21343 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 21344 uint32_t groupIndex, 21345 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21346 { 21347 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21348 d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); 21349 } 21350 21351 template <typename Dispatch> 21352 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,Dispatch const & d) const21353 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, 21354 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21355 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, 21356 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21357 { 21358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21359 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, 21360 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), 21361 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21362 reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); 21363 } 21364 21365 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21366 template <typename Dispatch> 21367 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21368 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 21369 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21370 Dispatch const & d ) const 21371 { 21372 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21373 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21374 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 21375 # endif 21376 21377 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 21378 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 21379 m_device, 21380 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 21381 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21382 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout 
) ) ); 21383 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); 21384 21385 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); 21386 } 21387 21388 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21389 template <typename Dispatch> 21390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21391 Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 21392 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21393 Dispatch const & d ) const 21394 { 21395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21396 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21397 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 21398 # endif 21399 21400 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 21401 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 21402 m_device, 21403 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 21404 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21405 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 21406 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); 21407 21408 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 21409 
UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( 21410 indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 21411 } 21412 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21413 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21414 21415 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21416 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 21417 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21418 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21419 { 21420 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21421 d.vkDestroyIndirectCommandsLayoutNV( 21422 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21423 } 21424 21425 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21426 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21427 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 21428 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21429 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21430 { 21431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21433 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 21434 # endif 21435 21436 d.vkDestroyIndirectCommandsLayoutNV( 21437 m_device, 21438 
static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 21439 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21440 } 21441 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21442 21443 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21444 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 21445 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21446 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21447 { 21448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21449 d.vkDestroyIndirectCommandsLayoutNV( 21450 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21451 } 21452 21453 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21454 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21455 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 21456 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21457 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21458 { 21459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21460 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21461 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 21462 # endif 21463 21464 d.vkDestroyIndirectCommandsLayoutNV( 21465 m_device, 21466 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 21467 reinterpret_cast<const VkAllocationCallbacks *>( 
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21468 } 21469 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21470 21471 //=== VK_EXT_depth_bias_control === 21472 21473 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,Dispatch const & d) const21474 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, 21475 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21476 { 21477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21478 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); 21479 } 21480 21481 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21482 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,Dispatch const & d) const21483 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, 21484 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21485 { 21486 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21487 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21488 VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" ); 21489 # endif 21490 21491 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) ); 21492 } 21493 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21494 21495 //=== VK_EXT_acquire_drm_display === 21496 21497 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 21498 template <typename Dispatch> acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const21499 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, 21500 VULKAN_HPP_NAMESPACE::DisplayKHR display, 21501 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21502 { 21503 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21504 return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 21505 } 21506 #else 21507 template <typename Dispatch> 21508 VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const21509 PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 21510 { 21511 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21512 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21513 VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 21514 # endif 21515 21516 VULKAN_HPP_NAMESPACE::Result result = 21517 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 21518 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); 21519 21520 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21521 } 21522 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 21523 21524 template <typename Dispatch> getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,VULKAN_HPP_NAMESPACE::DisplayKHR * display,Dispatch const & d) const21525 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, 21526 uint32_t connectorId, 21527 VULKAN_HPP_NAMESPACE::DisplayKHR * display, 21528 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21529 { 21530 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21531 return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); 21532 } 21533 21534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21535 template <typename Dispatch> 21536 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const21537 PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 21538 { 21539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21540 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21541 VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 21542 # endif 21543 21544 VULKAN_HPP_NAMESPACE::DisplayKHR display; 21545 VULKAN_HPP_NAMESPACE::Result result = 21546 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 21547 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); 21548 21549 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 21550 } 21551 21552 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21553 template <typename Dispatch> 21554 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getDrmDisplayEXTUnique(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const21555 PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 21556 { 21557 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21558 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21559 VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 21560 # endif 21561 21562 VULKAN_HPP_NAMESPACE::DisplayKHR display; 21563 VULKAN_HPP_NAMESPACE::Result result = 21564 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 21565 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); 21566 21567 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21568 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 21569 } 21570 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21572 21573 //=== VK_EXT_private_data === 21574 21575 template <typename Dispatch> createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const21576 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 21577 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21578 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 21579 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21580 { 21581 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21582 return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, 21583 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 21584 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21585 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 21586 } 21587 21588 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21589 template <typename Dispatch> 21590 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21591 Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 21592 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    VULKAN_HPP_NAMESPACE::Result          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT(
      m_device,
      reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
    // resultCheck throws (or asserts, in no-exceptions builds) on any non-success code.
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Same as createPrivateDataSlotEXT, but returns the slot wrapped in a UniqueHandle whose
  // ObjectDestroy deleter (capturing this device, the allocator and the dispatcher) destroys it on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
    Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &   createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
#  endif

    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    VULKAN_HPP_NAMESPACE::Result          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT(
      m_device,
      reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw (pointer-parameter) overload: thin pass-through to vkDestroyPrivateDataSlotEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot             privateDataSlot,
                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes an Optional allocator reference instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot                     privateDataSlot,
                                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                            Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
# endif

    d.vkDestroyPrivateDataSlotEXT(
      m_device,
      static_cast<VkPrivateDataSlot>( privateDataSlot ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: returns the raw Result for the caller to inspect.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
                                                                           uint64_t                              objectHandle,
                                                                           VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                           uint64_t                              data,
                                                                           Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
  }
#else
  // Enhanced build: result is checked and translated into the configured error-handling scheme.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
                                                                                    uint64_t                              objectHandle,
                                                                                    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                                    uint64_t                              data,
                                                                                    Dispatch const &                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Raw overload: writes the slot's value for (objectType_, objectHandle) into *pData.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
                                                    uint64_t                              objectHandle,
                                                    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                    uint64_t *                            pData,
                                                    Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the value directly (vkGetPrivateDataEXT itself cannot fail).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
                                                                             uint64_t                              objectHandle,
                                                                             VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                             Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
# endif

    uint64_t data;
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );

    return data;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_video_encode_queue ===

  // Raw overload: queries encode quality-level properties for the given physical device.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,
                                                             VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
                                                                 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ),
                                                                 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::type
    PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
                                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
                       "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
    VULKAN_HPP_NAMESPACE::Result                               result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
                                                                 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
                                                                 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) );
  }

  // StructureChain overload: the properties struct is filled in place inside the caller-specified chain,
  // so pNext-extension structures are populated by the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
                                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
                       "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    StructureChain<X, Y, Z...>                                   structureChain;
    VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>();
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
                                                                 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
                                                                 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: standard Vulkan two-call pattern — pass pData == nullptr to query *pDataSize first.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,
                                                 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR *  pFeedbackInfo,
                                                 size_t *                                                             pDataSize,
                                                 void *                                                               pData,
                                                 Dispatch const &                                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                               reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ),
                                               reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ),
                                               pDataSize,
                                               pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: size-query / fill loop, retried while the implementation reports eIncomplete
  // (the required size may grow between the two calls).
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_;
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
    std::vector<uint8_t, Uint8_tAllocator> &                            data         = data_.second;
    size_t                                                              dataSize;
    VULKAN_HPP_NAMESPACE::Result                                        result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }

  // Allocator-taking variant: identical logic, but the result vector is constructed with the
  // caller-supplied allocator (piecewise_construct keeps the feedback struct default-constructed).
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Uint8_tAllocator &                                                   uint8_tAllocator,
                                                 Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_(
      std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
    std::vector<uint8_t, Uint8_tAllocator> &                            data         = data_.second;
    size_t                                                              dataSize;
    VULKAN_HPP_NAMESPACE::Result                                        result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }

  // StructureChain variant: feedback info lives inside the chain so its pNext extensions get filled too.
  template <typename X, typename Y, typename... Z, typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_;
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
      data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
    std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
    size_t                                   dataSize;
    VULKAN_HPP_NAMESPACE::Result             result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }

  // StructureChain + allocator variant: combines the two customizations above.
  template <typename X,
            typename Y,
            typename... Z,
            typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Uint8_tAllocator &                                                   uint8_tAllocator,
                                                 Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_(
      std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
      data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
    std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
    size_t                                   dataSize;
    VULKAN_HPP_NAMESPACE::Result             result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a video-encode operation into the command buffer (raw pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
                                                        Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference-taking convenience overload of encodeVideoKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
                                                        Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires <VK_KHR_video_encode_queue>" );
# endif

    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_NV_cuda_kernel_launch ===

  // Raw overload: creates a CUDA module on this device.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
                                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                            VULKAN_HPP_NAMESPACE::CudaModuleNV *                 pModule,
                                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
                                                        reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created module by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
    Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
#  endif

    VULKAN_HPP_NAMESPACE::CudaModuleNV module;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaModuleNV( m_device,
                              reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // UniqueHandle overload: module is destroyed automatically via ObjectDestroy when the handle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
    Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
#   endif

    VULKAN_HPP_NAMESPACE::CudaModuleNV module;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaModuleNV( m_device,
                              reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: two-call size/fill query of the module's binary cache.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
                                                                              size_t *                           pCacheSize,
                                                                              void *                             pCacheData,
                                                                              Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the cache bytes in a vector; loops on eIncomplete, then trims the
  // vector if the final size came back smaller than the queried size.
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
#  endif

    std::vector<uint8_t, Uint8_tAllocator> cacheData;
    size_t                                 cacheSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
      {
        cacheData.resize( cacheSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
    VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
    if ( cacheSize < cacheData.size() )
    {
      cacheData.resize( cacheSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) );
  }

  // Allocator-taking variant of getCudaModuleCacheNV.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
#  endif

    std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator );
    size_t                                 cacheSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
      {
        cacheData.resize( cacheSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
    VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
    if ( cacheSize < cacheData.size() )
    {
      cacheData.resize( cacheSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: creates a CUDA function (kernel entry point) from a module.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::CudaFunctionNV *                 pFunction,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
                                                          reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created function by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
    Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV &    createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
#  endif

    VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaFunctionNV( m_device,
                                reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // UniqueHandle overload of createCudaFunctionNV.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
    Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV &    createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
#   endif

    VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaFunctionNV( m_device,
                                reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys a CUDA module.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV                module,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload of destroyCudaModuleNV with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV                        module,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
#  endif

    d.vkDestroyCudaModuleNV( m_device,
                             static_cast<VkCudaModuleNV>( module ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22232 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 22233 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22234 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22235 { 22236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22237 d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22238 } 22239 22240 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22241 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22242 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 22243 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22245 { 22246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22247 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22248 VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 22249 # endif 22250 22251 d.vkDestroyCudaModuleNV( m_device, 22252 static_cast<VkCudaModuleNV>( module ), 22253 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22254 } 22255 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22256 22257 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22258 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 22259 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22260 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22261 { 22262 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22263 d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22264 } 22265 22266 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22267 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22268 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 22269 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22271 { 22272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22273 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22274 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 22275 # endif 22276 22277 d.vkDestroyCudaFunctionNV( m_device, 22278 static_cast<VkCudaFunctionNV>( function ), 22279 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22280 } 22281 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22282 22283 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22284 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 22285 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22286 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22287 { 22288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22289 d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22290 } 22291 22292 # ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 22293 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22294 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 22295 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22297 { 22298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22300 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 22301 # endif 22302 22303 d.vkDestroyCudaFunctionNV( m_device, 22304 static_cast<VkCudaFunctionNV>( function ), 22305 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22306 } 22307 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22308 22309 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,Dispatch const & d) const22310 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, 22311 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22312 { 22313 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22314 d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); 22315 } 22316 22317 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22318 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,Dispatch const & d) const22319 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, 22320 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22321 { 22322 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22323 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22324 VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" ); 22325 # endif 22326 22327 d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) ); 22328 } 22329 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22330 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 22331 22332 #if defined( VK_USE_PLATFORM_METAL_EXT ) 22333 //=== VK_EXT_metal_objects === 22334 22335 template <typename Dispatch> exportMetalObjectsEXT(VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,Dispatch const & d) const22336 VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, 22337 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22338 { 22339 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22340 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); 22341 } 22342 22343 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22344 template <typename Dispatch> 22345 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT(Dispatch const & d) const22346 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22347 { 22348 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22349 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22350 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 22351 # endif 22352 22353 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; 22354 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 22355 22356 return metalObjectsInfo; 22357 } 22358 22359 template <typename X, typename Y, typename... 
Z, typename Dispatch> 22360 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> exportMetalObjectsEXT(Dispatch const & d) const22361 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22362 { 22363 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22364 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22365 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 22366 # endif 22367 22368 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 22369 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>(); 22370 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 22371 22372 return structureChain; 22373 } 22374 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22375 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 22376 22377 //=== VK_KHR_synchronization2 === 22378 22379 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const22380 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 22381 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 22382 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22383 { 22384 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22385 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 22386 } 22387 22388 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22389 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const22390 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 22391 const 
VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 22392 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22393 { 22394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22395 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22396 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 22397 # endif 22398 22399 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 22400 } 22401 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22402 22403 template <typename Dispatch> resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const22404 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 22405 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 22406 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22407 { 22408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22409 d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 22410 } 22411 22412 template <typename Dispatch> waitEvents2KHR(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const22413 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, 22414 const VULKAN_HPP_NAMESPACE::Event * pEvents, 22415 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 22416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22417 { 22418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22419 d.vkCmdWaitEvents2KHR( 22420 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 22421 } 22422 22423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22424 template <typename 
Dispatch> waitEvents2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const22425 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 22426 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 22427 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 22428 { 22429 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22430 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22431 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 22432 # endif 22433 # ifdef VULKAN_HPP_NO_EXCEPTIONS 22434 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 22435 # else 22436 if ( events.size() != dependencyInfos.size() ) 22437 { 22438 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); 22439 } 22440 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 22441 22442 d.vkCmdWaitEvents2KHR( m_commandBuffer, 22443 events.size(), 22444 reinterpret_cast<const VkEvent *>( events.data() ), 22445 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 22446 } 22447 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22448 22449 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const22450 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 22451 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22452 { 22453 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22454 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) 
); 22455 } 22456 22457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22458 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const22459 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 22460 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22461 { 22462 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22463 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22464 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 22465 # endif 22466 22467 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 22468 } 22469 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22470 22471 template <typename Dispatch> writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const22472 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 22473 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 22474 uint32_t query, 22475 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22476 { 22477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22478 d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 22479 } 22480 22481 template <typename Dispatch> submit2KHR(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const22482 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, 22483 const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 22484 VULKAN_HPP_NAMESPACE::Fence fence, 22485 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22486 { 22487 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22488 return static_cast<Result>( 22489 d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 22490 } 22491 22492 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22493 template <typename Dispatch> submit2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const22494 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( 22495 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 22496 { 22497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22498 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22499 VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 22500 # endif 22501 22502 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22503 d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 22504 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); 22505 22506 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22507 } 22508 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22509 22510 template <typename Dispatch> writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const22511 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 22512 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 22513 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 22514 uint32_t marker, 
22515 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22516 { 22517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22518 d.vkCmdWriteBufferMarker2AMD( 22519 m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker ); 22520 } 22521 22522 template <typename Dispatch> getCheckpointData2NV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,Dispatch const & d) const22523 VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, 22524 VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, 22525 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22526 { 22527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22528 d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); 22529 } 22530 22531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22532 template <typename CheckpointData2NVAllocator, typename Dispatch> 22533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(Dispatch const & d) const22534 Queue::getCheckpointData2NV( Dispatch const & d ) const 22535 { 22536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22537 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22538 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" ); 22539 # endif 22540 22541 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData; 22542 uint32_t checkpointDataCount; 22543 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 22544 checkpointData.resize( checkpointDataCount ); 22545 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 22546 
22547 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 22548 if ( checkpointDataCount < checkpointData.size() ) 22549 { 22550 checkpointData.resize( checkpointDataCount ); 22551 } 22552 return checkpointData; 22553 } 22554 22555 template <typename CheckpointData2NVAllocator, 22556 typename Dispatch, 22557 typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type> 22558 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(CheckpointData2NVAllocator & checkpointData2NVAllocator,Dispatch const & d) const22559 Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const 22560 { 22561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22562 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22563 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" ); 22564 # endif 22565 22566 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator ); 22567 uint32_t checkpointDataCount; 22568 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 22569 checkpointData.resize( checkpointDataCount ); 22570 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 22571 22572 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 22573 if ( checkpointDataCount < checkpointData.size() ) 22574 { 22575 checkpointData.resize( checkpointDataCount ); 22576 } 22577 return checkpointData; 22578 } 22579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22580 22581 //=== VK_EXT_descriptor_buffer === 22582 22583 template <typename Dispatch> getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout 
layout,VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,Dispatch const & d) const22584 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 22585 VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, 22586 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22587 { 22588 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22589 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); 22590 } 22591 22592 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22593 template <typename Dispatch> 22594 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,Dispatch const & d) const22595 Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22596 { 22597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22599 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" ); 22600 # endif 22601 22602 VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; 22603 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) ); 22604 22605 return layoutSizeInBytes; 22606 } 22607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22608 22609 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,Dispatch const & d) const22610 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 22611 uint32_t binding, 22612 VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, 
22613 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22614 { 22615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22616 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); 22617 } 22618 22619 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22620 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,Dispatch const & d) const22621 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( 22622 VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22623 { 22624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22626 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && 22627 "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" ); 22628 # endif 22629 22630 VULKAN_HPP_NAMESPACE::DeviceSize offset; 22631 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) ); 22632 22633 return offset; 22634 } 22635 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22636 22637 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const22638 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, 22639 size_t dataSize, 22640 void * pDescriptor, 22641 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22642 { 22643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22644 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, 
pDescriptor ); 22645 } 22646 22647 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22648 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const22649 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 22650 size_t dataSize, 22651 void * pDescriptor, 22652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22653 { 22654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22655 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22656 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 22657 # endif 22658 22659 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor ); 22660 } 22661 22662 template <typename DescriptorType, typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,Dispatch const & d) const22663 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 22664 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22665 { 22666 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22667 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22668 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 22669 # endif 22670 22671 DescriptorType descriptor; 22672 d.vkGetDescriptorEXT( 22673 m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) ); 22674 22675 return descriptor; 22676 } 22677 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22678 22679 template <typename Dispatch> bindDescriptorBuffersEXT(uint32_t bufferCount,const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT 
* pBindingInfos,Dispatch const & d) const22680 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, 22681 const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, 22682 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22683 { 22684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22685 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); 22686 } 22687 22688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22689 template <typename Dispatch> 22690 VULKAN_HPP_INLINE void bindDescriptorBuffersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,Dispatch const & d) const22691 CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, 22692 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22693 { 22694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22695 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22696 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" ); 22697 # endif 22698 22699 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) ); 22700 } 22701 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22702 22703 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t setCount,const uint32_t * pBufferIndices,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const22704 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22705 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22706 uint32_t firstSet, 22707 uint32_t setCount, 22708 const uint32_t * pBufferIndices, 22709 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 22710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22711 { 22712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22713 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 22714 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 22715 static_cast<VkPipelineLayout>( layout ), 22716 firstSet, 22717 setCount, 22718 pBufferIndices, 22719 reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 22720 } 22721 22722 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22723 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const22724 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22725 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22726 uint32_t firstSet, 22727 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, 22728 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 22729 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 22730 { 22731 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22732 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22733 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" ); 22734 # endif 22735 # ifdef VULKAN_HPP_NO_EXCEPTIONS 22736 VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); 22737 # else 22738 if ( bufferIndices.size() != offsets.size() ) 22739 { 22740 throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); 22741 } 22742 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 22743 22744 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 22745 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 22746 static_cast<VkPipelineLayout>( layout ), 22747 firstSet, 22748 bufferIndices.size(), 22749 bufferIndices.data(), 22750 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 22751 } 22752 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22753 22754 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,Dispatch const & d) const22755 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22756 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22757 uint32_t set, 22758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22759 { 22760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22761 d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( 22762 m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); 22763 } 22764 22765 template <typename Dispatch> getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22766 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( 22767 const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22768 { 22769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22770 return static_cast<Result>( 22771 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22772 } 
22773 22774 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22775 template <typename DataType, typename Dispatch> 22776 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22777 Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22778 { 22779 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22780 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22781 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT && 22782 "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22783 # endif 22784 22785 DataType data; 22786 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22787 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22788 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); 22789 22790 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22791 } 22792 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22793 22794 template <typename Dispatch> getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22795 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( 22796 const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22797 { 22798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22799 return static_cast<Result>( 22800 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const 
VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22801 } 22802 22803 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22804 template <typename DataType, typename Dispatch> 22805 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22806 Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22807 { 22808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22809 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22810 VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && 22811 "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22812 # endif 22813 22814 DataType data; 22815 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22816 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22817 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); 22818 22819 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22820 } 22821 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22822 22823 template <typename Dispatch> getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22824 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( 22825 const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22826 { 22827 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22828 return static_cast<Result>( 22829 
d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22830 } 22831 22832 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22833 template <typename DataType, typename Dispatch> 22834 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22835 Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22836 { 22837 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22838 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22839 VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && 22840 "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22841 # endif 22842 22843 DataType data; 22844 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22845 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22846 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); 22847 22848 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22849 } 22850 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22851 22852 template <typename Dispatch> getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22853 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( 22854 const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22855 { 22856 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22857 return static_cast<Result>( 22858 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22859 } 22860 22861 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22862 template <typename DataType, typename Dispatch> 22863 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22864 Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22865 { 22866 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22867 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22868 VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && 22869 "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22870 # endif 22871 22872 DataType data; 22873 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22874 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22875 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); 22876 22877 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22878 } 22879 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22880 22881 template <typename Dispatch> getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22882 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22883 const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22884 { 22885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22886 return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22887 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22888 } 22889 22890 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22891 template <typename DataType, typename Dispatch> 22892 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22893 Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, 22894 Dispatch const & d ) const 22895 { 22896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22897 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22898 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && 22899 "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22900 # endif 22901 22902 DataType data; 22903 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22904 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22905 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); 22906 22907 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22908 } 22909 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22910 22911 //=== VK_NV_fragment_shading_rate_enums === 
22912 22913 template <typename Dispatch> setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const22914 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, 22915 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 22916 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22917 { 22918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22919 d.vkCmdSetFragmentShadingRateEnumNV( 22920 m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 22921 } 22922 22923 //=== VK_EXT_mesh_shader === 22924 22925 template <typename Dispatch> 22926 VULKAN_HPP_INLINE void drawMeshTasksEXT(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const22927 CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22928 { 22929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22930 d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 22931 } 22932 22933 template <typename Dispatch> drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const22934 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 22935 VULKAN_HPP_NAMESPACE::DeviceSize offset, 22936 uint32_t drawCount, 22937 uint32_t stride, 22938 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22939 { 22940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22941 d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, 
stride ); 22942 } 22943 22944 template <typename Dispatch> drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const22945 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 22946 VULKAN_HPP_NAMESPACE::DeviceSize offset, 22947 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 22948 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 22949 uint32_t maxDrawCount, 22950 uint32_t stride, 22951 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22952 { 22953 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22954 d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, 22955 static_cast<VkBuffer>( buffer ), 22956 static_cast<VkDeviceSize>( offset ), 22957 static_cast<VkBuffer>( countBuffer ), 22958 static_cast<VkDeviceSize>( countBufferOffset ), 22959 maxDrawCount, 22960 stride ); 22961 } 22962 22963 //=== VK_KHR_copy_commands2 === 22964 22965 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const22966 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 22967 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22968 { 22969 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22970 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 22971 } 22972 22973 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22974 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const22975 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 22976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22977 { 22978 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22979 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22980 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22981 # endif 22982 22983 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 22984 } 22985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22986 22987 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const22988 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, 22989 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22990 { 22991 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22992 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 22993 } 22994 22995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22996 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const22997 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, 22998 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22999 { 23000 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23001 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23002 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23003 # endif 23004 23005 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 23006 } 23007 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23008 23009 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const23010 VULKAN_HPP_INLINE void 
CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 23011 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23012 { 23013 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23014 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 23015 } 23016 23017 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23018 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const23019 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 23020 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23021 { 23022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23023 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23024 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23025 # endif 23026 23027 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 23028 } 23029 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23030 23031 template <typename Dispatch> copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const23032 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 23033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23034 { 23035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23036 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 23037 } 23038 23039 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23040 template <typename Dispatch> copyImageToBuffer2KHR(const 
VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const23041 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 23042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23043 { 23044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23045 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23046 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23047 # endif 23048 23049 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 23050 } 23051 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23052 23053 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const23054 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, 23055 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23056 { 23057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23058 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 23059 } 23060 23061 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23062 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const23063 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, 23064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23065 { 23066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23068 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23069 # endif 23070 23071 d.vkCmdBlitImage2KHR( m_commandBuffer, 
reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 23072 } 23073 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23074 23075 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const23076 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 23077 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23078 { 23079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23080 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 23081 } 23082 23083 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23084 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const23085 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 23086 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23087 { 23088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23089 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23090 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23091 # endif 23092 23093 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 23094 } 23095 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23096 23097 //=== VK_EXT_device_fault === 23098 23099 template <typename Dispatch> getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,Dispatch const & d) const23100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, 23101 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, 23102 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
23103 { 23104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23105 return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( 23106 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); 23107 } 23108 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 23109 //=== VK_NV_acquire_winrt_display === 23110 23111 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 23112 template <typename Dispatch> acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const23113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, 23114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23115 { 23116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23117 return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 23118 } 23119 # else 23120 template <typename Dispatch> 23121 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const23122 PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 23123 { 23124 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23125 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23126 VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23127 # endif 23128 23129 VULKAN_HPP_NAMESPACE::Result result = 23130 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 23131 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); 23132 23133 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 23134 } 23135 # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 23136 23137 template <typename Dispatch> getWinrtDisplayNV(uint32_t deviceRelativeId,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const23138 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, 23139 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 23140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23141 { 23142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23143 return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 23144 } 23145 23146 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23147 template <typename Dispatch> 23148 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV(uint32_t deviceRelativeId,Dispatch const & d) const23149 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const 23150 { 23151 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23152 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23153 VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23154 # endif 23155 23156 VULKAN_HPP_NAMESPACE::DisplayKHR display; 23157 VULKAN_HPP_NAMESPACE::Result result = 23158 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 23159 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); 23160 23161 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 23162 } 23163 23164 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23165 template <typename Dispatch> 23166 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type 
getWinrtDisplayNVUnique(uint32_t deviceRelativeId,Dispatch const & d) const23167 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const 23168 { 23169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23170 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23171 VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23172 # endif 23173 23174 VULKAN_HPP_NAMESPACE::DisplayKHR display; 23175 VULKAN_HPP_NAMESPACE::Result result = 23176 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 23177 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); 23178 23179 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23180 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 23181 } 23182 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23183 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23184 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 23185 23186 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) 23187 //=== VK_EXT_directfb_surface === 23188 23189 template <typename Dispatch> createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const23190 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, 23191 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23192 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 23193 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23194 { 23195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23196 return 
static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
                                                       reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the create-info by reference plus an optional allocator, checks the
  // VkResult via resultCheck, and returns the created SurfaceKHR wrapped in ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                        Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: same call as above, but the returned surface is owned by a UniqueHandle
  // whose deleter (ObjectDestroy) destroys it with the same instance, allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                              Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw C-API overload: forwards directly to vkGetPhysicalDeviceDirectFBPresentationSupportEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t         queueFamilyIndex,
                                                                              IDirectFB *      dfb,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the IDirectFB by reference instead of by pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
                       "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" );
#  endif

    VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

  //=== VK_EXT_vertex_input_dynamic_state ===

  // Raw overload: explicit count + pointer pairs forwarded unchanged to vkCmdSetVertexInputEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t                                                          vertexBindingDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT *   pVertexBindingDescriptions,
                                                           uint32_t                                                          vertexAttributeDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
                                                           Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetVertexInputEXT( m_commandBuffer,
                              vertexBindingDescriptionCount,
                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
                              vertexAttributeDescriptionCount,
                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy ranges supply both the element counts and the data pointers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const &   vertexBindingDescriptions,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
    Dispatch const &                                                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
# endif

    d.vkCmdSetVertexInputEXT( m_commandBuffer,
                              vertexBindingDescriptions.size(),
                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
                              vertexAttributeDescriptions.size(),
                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_memory ===

  // Raw overload: exports a zircon handle for device memory via the pZirconHandle out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                          zx_handle_t *                                                  pZirconHandle,
                                          Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the zircon handle by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
#  endif

    zx_handle_t                  zirconHandle;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: queries the memory-type properties of an external zircon handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits      handleType,
                                                    zx_handle_t                                                 zirconHandle,
                                                    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
                                                    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                  zirconHandle,
                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties structure by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                    zx_handle_t                                            zirconHandle,
                                                    Dispatch const &                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA &&
                       "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
    VULKAN_HPP_NAMESPACE::Result                              result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                  zirconHandle,
                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_semaphore ===

  // Raw overload: imports a zircon handle into an existing semaphore.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: no out value — only the VkResult check remains (void ResultValueType).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
                                                Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: exports a zircon handle from a semaphore via the pZirconHandle out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                             zx_handle_t *                                                     pZirconHandle,
                                             Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the exported semaphore zircon handle by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
#  endif

    zx_handle_t                  zirconHandle;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_buffer_collection ===

  // Raw overload: creates a buffer collection; the new handle is written through pCollection.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
                                           VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA *                 pCollection,
                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
                                                                   reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                   reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the new BufferCollectionFUCHSIA by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                           Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the returned collection is destroyed automatically via ObjectDestroy
  // (same device, allocator and dispatcher) when the UniqueHandle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
    Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                                 Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: applies image constraints to a buffer collection.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
                                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter, void result; VkResult checked via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
                                                        Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA &&
                       "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: applies buffer constraints to a buffer collection.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA              collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
                                                         Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter, void result; VkResult checked via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA              collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
                                                         Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA &&
                       "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys a buffer collection with explicit allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA     collection,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional allocator instead of a raw callbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload for BufferCollectionFUCHSIA — calls the same C entry point as
  // destroyBufferCollectionFUCHSIA above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA     collection,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload with Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: queries buffer-collection properties via out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA &&
                       "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
#  endif

    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
    VULKAN_HPP_NAMESPACE::Result                            result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  //=== VK_HUAWEI_subpass_shading ===

  // Raw overload: queries the max subpass-shading workgroup size via out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
                                                                                                VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
                                                                                                Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the max workgroup size by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type
    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
                       "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
# endif

    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records the subpass-shading dispatch into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
  }

  //=== VK_HUAWEI_invocation_mask ===

  // Binds an invocation-mask image view (with its layout) to this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView   imageView,
                                                                  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                                                  Dispatch const &                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  }

  //=== VK_NV_external_memory_rdma ===

  // Raw overload: queries the remote address of exported memory via the pAddress out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
                                      VULKAN_HPP_NAMESPACE::RemoteAddressNV *                    pAddress,
                                      Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the RemoteAddressNV by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" );
# endif

    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
    VULKAN_HPP_NAMESPACE::Result          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryRemoteAddressNV(
      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_pipeline_properties ===

  // Raw overload: queries pipeline properties into a caller-provided BaseOutStructure chain.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
                                                                                  VULKAN_HPP_NAMESPACE::BaseOutStructure *      pPipelineProperties,
                                                                                  Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a BaseOutStructure by value after checking the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
    Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" );
# endif

    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
    VULKAN_HPP_NAMESPACE::Result           result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_extended_dynamic_state2 ===

  // Sets the tessellation patch control-point count dynamically.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }

  // Toggles rasterizer discard dynamically.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                                       Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }

  template <typename Dispatch>
& d) const23800 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23801 { 23802 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23803 d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 23804 } 23805 23806 template <typename Dispatch> setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp,Dispatch const & d) const23807 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23808 { 23809 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23810 d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) ); 23811 } 23812 23813 template <typename Dispatch> setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const23814 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 23815 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23816 { 23817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23818 d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 23819 } 23820 23821 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 23822 //=== VK_QNX_screen_surface === 23823 23824 template <typename Dispatch> createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const23825 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, 23826 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23827 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 23828 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
23829 { 23830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23831 return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, 23832 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), 23833 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 23834 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 23835 } 23836 23837 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23838 template <typename Dispatch> 23839 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23840 Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 23841 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23842 Dispatch const & d ) const 23843 { 23844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23845 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23846 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 23847 # endif 23848 23849 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23850 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 23851 m_instance, 23852 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 23853 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23854 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23855 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); 23856 23857 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 23858 } 23859 23860 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23861 template 
<typename Dispatch> 23862 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createScreenSurfaceQNXUnique(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23863 Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 23864 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23865 Dispatch const & d ) const 23866 { 23867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23868 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23869 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 23870 # endif 23871 23872 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23873 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 23874 m_instance, 23875 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 23876 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23877 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23878 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); 23879 23880 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23881 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 23882 } 23883 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23884 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23885 23886 template <typename Dispatch> getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window * window,Dispatch const & d) const23887 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t 
queueFamilyIndex, 23888 struct _screen_window * window, 23889 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23890 { 23891 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23892 return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); 23893 } 23894 23895 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23896 template <typename Dispatch> 23897 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window & window,Dispatch const & d) const23898 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23899 { 23900 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23901 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23902 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && 23903 "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" ); 23904 # endif 23905 23906 VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); 23907 23908 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 23909 } 23910 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23911 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 23912 23913 //=== VK_EXT_color_write_enable === 23914 23915 template <typename Dispatch> setColorWriteEnableEXT(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,Dispatch const & d) const23916 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, 23917 const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, 23918 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23919 { 23920 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23921 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( 
pColorWriteEnables ) ); 23922 } 23923 23924 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23925 template <typename Dispatch> setColorWriteEnableEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,Dispatch const & d) const23926 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, 23927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23928 { 23929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23930 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23931 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" ); 23932 # endif 23933 23934 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) ); 23935 } 23936 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23937 23938 //=== VK_KHR_ray_tracing_maintenance1 === 23939 23940 template <typename Dispatch> traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const23941 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 23942 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23943 { 23944 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23945 d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 23946 } 23947 23948 //=== VK_EXT_multi_draw === 23949 23950 template <typename Dispatch> drawMultiEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,Dispatch const & d) const23951 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, 23952 const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, 23953 uint32_t instanceCount, 23954 
uint32_t firstInstance, 23955 uint32_t stride, 23956 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23957 { 23958 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23959 d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride ); 23960 } 23961 23962 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23963 template <typename Dispatch> drawMultiEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,uint32_t instanceCount,uint32_t firstInstance,Dispatch const & d) const23964 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, 23965 uint32_t instanceCount, 23966 uint32_t firstInstance, 23967 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23968 { 23969 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23970 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23971 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" ); 23972 # endif 23973 23974 d.vkCmdDrawMultiEXT( m_commandBuffer, 23975 vertexInfo.size(), 23976 reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), 23977 instanceCount, 23978 firstInstance, 23979 vertexInfo.stride() ); 23980 } 23981 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23982 23983 template <typename Dispatch> drawMultiIndexedEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,const int32_t * pVertexOffset,Dispatch const & d) const23984 VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, 23985 const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, 23986 uint32_t instanceCount, 23987 uint32_t firstInstance, 23988 uint32_t stride, 23989 const int32_t * pVertexOffset, 23990 Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT 23991 { 23992 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23993 d.vkCmdDrawMultiIndexedEXT( 23994 m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); 23995 } 23996 23997 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23998 template <typename Dispatch> 23999 VULKAN_HPP_INLINE void drawMultiIndexedEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,uint32_t instanceCount,uint32_t firstInstance,Optional<const int32_t> vertexOffset,Dispatch const & d) const24000 CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, 24001 uint32_t instanceCount, 24002 uint32_t firstInstance, 24003 Optional<const int32_t> vertexOffset, 24004 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24005 { 24006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24007 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24008 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" ); 24009 # endif 24010 24011 d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, 24012 indexInfo.size(), 24013 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), 24014 instanceCount, 24015 firstInstance, 24016 indexInfo.stride(), 24017 static_cast<const int32_t *>( vertexOffset ) ); 24018 } 24019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24020 24021 //=== VK_EXT_opacity_micromap === 24022 24023 template <typename Dispatch> createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,Dispatch const & d) const24024 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const 
VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, 24025 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24026 VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, 24027 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24028 { 24029 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24030 return static_cast<Result>( d.vkCreateMicromapEXT( m_device, 24031 reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), 24032 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 24033 reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) ); 24034 } 24035 24036 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24037 template <typename Dispatch> 24038 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24039 Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 24040 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24041 Dispatch const & d ) const 24042 { 24043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24044 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24045 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24046 # endif 24047 24048 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 24049 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24050 d.vkCreateMicromapEXT( m_device, 24051 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 24052 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24053 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 24054 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); 24055 
24056 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); 24057 } 24058 24059 # ifndef VULKAN_HPP_NO_SMART_HANDLE 24060 template <typename Dispatch> 24061 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24062 Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 24063 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24064 Dispatch const & d ) const 24065 { 24066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24068 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24069 # endif 24070 24071 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 24072 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24073 d.vkCreateMicromapEXT( m_device, 24074 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 24075 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24076 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 24077 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); 24078 24079 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 24080 result, UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 24081 } 24082 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 24083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24084 24085 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24086 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24087 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24089 { 24090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24091 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24092 } 24093 24094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24095 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24096 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24097 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24098 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24099 { 24100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24101 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24102 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24103 # endif 24104 24105 d.vkDestroyMicromapEXT( m_device, 24106 static_cast<VkMicromapEXT>( micromap ), 24107 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24108 } 24109 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24110 24111 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24112 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24113 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24115 { 24116 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24117 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24118 } 24119 24120 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24121 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24122 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24123 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24124 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24125 { 24126 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24127 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24128 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24129 # endif 24130 24131 d.vkDestroyMicromapEXT( m_device, 24132 static_cast<VkMicromapEXT>( micromap ), 24133 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24134 } 24135 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24136 24137 template <typename Dispatch> buildMicromapsEXT(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const24138 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, 24139 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 24140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24141 { 24142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24143 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); 24144 } 24145 24146 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24147 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const 
& d) const24148 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 24149 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24150 { 24151 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24152 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24153 VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 24154 # endif 24155 24156 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ); 24157 } 24158 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24159 24160 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const24161 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24162 uint32_t infoCount, 24163 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 24164 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24165 { 24166 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24167 return static_cast<Result>( d.vkBuildMicromapsEXT( 24168 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); 24169 } 24170 24171 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24172 template <typename Dispatch> 24173 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const24174 Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24175 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 24176 Dispatch const & d ) const 24177 { 24178 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24179 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24180 VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 24181 # endif 24182 24183 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBuildMicromapsEXT( 24184 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) ); 24185 VULKAN_HPP_NAMESPACE::detail::resultCheck( 24186 result, 24187 VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", 24188 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 24189 24190 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 24191 } 24192 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24193 24194 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const24195 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24196 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, 24197 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24198 { 24199 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24200 return static_cast<Result>( 24201 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); 24202 } 24203 24204 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24205 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const 
  // Enhanced overload: host-side micromap copy, optionally joined to a deferred operation.
  // The deferred-operation codes are treated as success (see the allowed-result set below).
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                                               const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
                                                                                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) );
    // eOperationDeferredKHR / eOperationNotDeferredKHR are valid outcomes here, so they must not throw.
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: forwards to vkCopyMicromapToMemoryEXT and returns the raw Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                                 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter; result is checked against the deferred-operation success set.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMicromapToMemoryEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: forwards to vkCopyMemoryToMicromapEXT and returns the raw Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                                 const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter; result is checked against the deferred-operation success set.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToMicromapEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: writes properties of micromapCount micromaps into the caller-provided buffer pData.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t                                  micromapCount,
                                                                                     const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
                                                                                     VULKAN_HPP_NAMESPACE::QueryType           queryType,
                                                                                     size_t                                    dataSize,
                                                                                     void *                                    pData,
                                                                                     size_t                                    stride,
                                                                                     Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
      m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the written properties as a std::vector<DataType>.
  // dataSize must be a whole multiple of sizeof( DataType ) (asserted below).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
                                         VULKAN_HPP_NAMESPACE::QueryType                                                  queryType,
                                         size_t                                                                           dataSize,
                                         size_t                                                                           stride,
                                         Dispatch const &                                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device,
                                                                                  micromaps.size(),
                                                                                  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
                                                                                  static_cast<VkQueryType>( queryType ),
                                                                                  data.size() * sizeof( DataType ),
                                                                                  reinterpret_cast<void *>( data.data() ),
                                                                                  stride ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Enhanced overload: single-value convenience variant — fetches exactly one DataType worth of property data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
                                       VULKAN_HPP_NAMESPACE::QueryType                                                  queryType,
                                       size_t                                                                           stride,
                                       Dispatch const &                                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device,
                                                                                  micromaps.size(),
                                                                                  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
                                                                                  static_cast<VkQueryType>( queryType ),
                                                                                  sizeof( DataType ),
                                                                                  reinterpret_cast<void *>( &data ),
                                                                                  stride ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a micromap copy into the command buffer (device-side execution).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same command, info struct taken by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a micromap-to-memory copy into the command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: records a micromap-to-memory copy, info struct taken by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a memory-to-micromap copy into the command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same command, info struct taken by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records property writes for micromapCount micromaps into queryPool, starting at firstQuery.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t                                  micromapCount,
                                                                     const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
                                                                     VULKAN_HPP_NAMESPACE::QueryType           queryType,
                                                                     VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
                                                                     uint32_t                                  firstQuery,
                                                                     Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
                                        micromapCount,
                                        reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
                                        static_cast<VkQueryType>( queryType ),
                                        static_cast<VkQueryPool>( queryPool ),
                                        firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the micromap list is passed as an ArrayProxy; count is taken from it.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
                                                VULKAN_HPP_NAMESPACE::QueryType                                                  queryType,
                                                VULKAN_HPP_NAMESPACE::QueryPool                                                  queryPool,
                                                uint32_t                                                                         firstQuery,
                                                Dispatch const &                                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
                                        micromaps.size(),
                                        reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
                                        static_cast<VkQueryType>( queryType ),
                                        static_cast<VkQueryPool>( queryPool ),
                                        firstQuery );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: queries compatibility of a serialized micromap with this device.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT *          pVersionInfo,
                                                              VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
                                                              Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceMicromapCompatibilityEXT( m_device,
                                           reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
                                           reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the compatibility value directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
    Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
    d.vkGetDeviceMicromapCompatibilityEXT( m_device,
                                           reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
                                           reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );

    return compatibility;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: queries the build sizes for a micromap build with the given type and info.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *      pBuildInfo,
                                                           VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT *       pSizeInfo,
                                                           Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetMicromapBuildSizesEXT( m_device,
                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the size info directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
    Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                      const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT &      buildInfo,
                                      Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" );
#  endif

    VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
    d.vkGetMicromapBuildSizesEXT( m_device,
                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );

    return sizeInfo;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_HUAWEI_cluster_culling_shader ===

  // Records a cluster-culling draw with the given workgroup counts.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }

  // Records an indirect cluster-culling draw; parameters are read from buffer at offset.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                   Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  }

  //=== VK_EXT_pageable_device_local_memory ===

  // Sets the pageout priority of a device memory allocation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
  }

  //=== VK_KHR_maintenance4 ===

  // Plain overload: queries buffer memory requirements from a creation-info description (no buffer object needed).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                  pMemoryRequirements,
                                                                 Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the requirements directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR &&
                       "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  // Enhanced overload: fills a StructureChain so callers can query extension structs chained to MemoryRequirements2.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR &&
                       "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: queries image memory requirements from a creation-info description (no image object needed).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                 pMemoryRequirements,
                                                                Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the requirements directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  // Enhanced overload: StructureChain variant for chained extension structs.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: two-purpose query — pass nullptr for pSparseMemoryRequirements to get the count.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                      uint32_t *                                                  pSparseMemoryRequirementCount,
                                                                      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *      pSparseMemoryRequirements,
                                                                      Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
                                                   pSparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: standard two-call pattern — query the count, size the vector, fetch the data.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t                                                                                                   sparseMemoryRequirementCount;
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    // Shrink in case the implementation returned fewer entries than it first reported.
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }

  // Enhanced overload: same as above, but constructs the result vector with a caller-supplied allocator.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
                                                 SparseImageMemoryRequirements2Allocator &                   sparseImageMemoryRequirements2Allocator,
                                                 Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    // Shrink in case the implementation returned fewer entries than it first reported.
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_VALVE_descriptor_set_host_mapping ===

  // Plain overload: queries host-mapping info for a descriptor-set binding reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
                                                                             VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE *  pHostMapping,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the host-mapping info directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
    Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE &&
                       "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
#  endif

    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );

    return hostMapping;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain overload: retrieves the host pointer backing a descriptor set via the output parameter.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the host pointer directly. (Definition continues past this chunk.)
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE &&
                       "Function <vkGetDescriptorSetHostMappingVALVE>
requires <VK_VALVE_descriptor_set_host_mapping>" ); 24780 # endif 24781 24782 void * pData; 24783 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData ); 24784 24785 return pData; 24786 } 24787 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24788 24789 //=== VK_NV_copy_memory_indirect === 24790 24791 template <typename Dispatch> copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,Dispatch const & d) const24792 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24793 uint32_t copyCount, 24794 uint32_t stride, 24795 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24796 { 24797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24798 d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); 24799 } 24800 24801 template <typename Dispatch> copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,Dispatch const & d) const24802 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24803 uint32_t copyCount, 24804 uint32_t stride, 24805 VULKAN_HPP_NAMESPACE::Image dstImage, 24806 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 24807 const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, 24808 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24809 { 24810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24811 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 24812 static_cast<VkDeviceAddress>( copyBufferAddress ), 24813 copyCount, 24814 stride, 24815 static_cast<VkImage>( dstImage ), 24816 static_cast<VkImageLayout>( 
dstImageLayout ), 24817 reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) ); 24818 } 24819 24820 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24821 template <typename Dispatch> 24822 VULKAN_HPP_INLINE void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,Dispatch const & d) const24823 CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24824 uint32_t stride, 24825 VULKAN_HPP_NAMESPACE::Image dstImage, 24826 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 24827 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, 24828 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24829 { 24830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24832 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" ); 24833 # endif 24834 24835 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 24836 static_cast<VkDeviceAddress>( copyBufferAddress ), 24837 imageSubresources.size(), 24838 stride, 24839 static_cast<VkImage>( dstImage ), 24840 static_cast<VkImageLayout>( dstImageLayout ), 24841 reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) ); 24842 } 24843 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24844 24845 //=== VK_NV_memory_decompression === 24846 24847 template <typename Dispatch> decompressMemoryNV(uint32_t decompressRegionCount,const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,Dispatch const & d) const24848 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, 24849 
const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, 24850 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24851 { 24852 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24853 d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); 24854 } 24855 24856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24857 template <typename Dispatch> 24858 VULKAN_HPP_INLINE void decompressMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,Dispatch const & d) const24859 CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, 24860 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24861 { 24862 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24863 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24864 VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" ); 24865 # endif 24866 24867 d.vkCmdDecompressMemoryNV( 24868 m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) ); 24869 } 24870 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24871 24872 template <typename Dispatch> decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,uint32_t stride,Dispatch const & d) const24873 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, 24874 VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, 24875 uint32_t stride, 24876 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24877 { 24878 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 24879 d.vkCmdDecompressMemoryIndirectCountNV( 24880 m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride ); 24881 } 24882 24883 //=== VK_NV_device_generated_commands_compute === 24884 24885 template <typename Dispatch> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const24886 VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, 24887 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 24888 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24889 { 24890 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24891 d.vkGetPipelineIndirectMemoryRequirementsNV( 24892 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 24893 } 24894 24895 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24896 template <typename Dispatch> 24897 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const24898 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 24899 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24900 { 24901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24902 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24903 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 24904 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 24905 # endif 24906 24907 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 24908 
d.vkGetPipelineIndirectMemoryRequirementsNV( 24909 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24910 24911 return memoryRequirements; 24912 } 24913 24914 template <typename X, typename Y, typename... Z, typename Dispatch> 24915 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const24916 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 24917 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24918 { 24919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24920 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24921 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 24922 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 24923 # endif 24924 24925 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 24926 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 24927 d.vkGetPipelineIndirectMemoryRequirementsNV( 24928 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24929 24930 return structureChain; 24931 } 24932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24933 24934 template <typename Dispatch> updatePipelineIndirectBufferNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const24935 VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 24936 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 24937 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 24938 { 24939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24940 d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 24941 } 24942 24943 template <typename Dispatch> getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,Dispatch const & d) const24944 VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, 24945 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24946 { 24947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24948 return static_cast<DeviceAddress>( 24949 d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); 24950 } 24951 24952 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24953 template <typename Dispatch> 24954 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info,Dispatch const & d) const24955 Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24956 { 24957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24958 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24959 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && 24960 "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" ); 24961 # endif 24962 24963 VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) ); 24964 24965 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 24966 } 24967 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24968 24969 //=== 
VK_EXT_extended_dynamic_state3 === 24970 24971 template <typename Dispatch> setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,Dispatch const & d) const24972 VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24973 { 24974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24975 d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) ); 24976 } 24977 24978 template <typename Dispatch> setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,Dispatch const & d) const24979 VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24980 { 24981 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24982 d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) ); 24983 } 24984 24985 template <typename Dispatch> setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,Dispatch const & d) const24986 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, 24987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24988 { 24989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24990 d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); 24991 } 24992 24993 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,Dispatch const & d) const24994 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 24995 const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, 24996 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24997 { 24998 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24999 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); 25000 } 25001 25002 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25003 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,Dispatch const & d) const25004 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 25005 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, 25006 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 25007 { 25008 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25009 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25010 VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25011 # endif 25012 # ifdef VULKAN_HPP_NO_EXCEPTIONS 25013 VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 ); 25014 # else 25015 if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 ) 25016 { 25017 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" ); 25018 } 25019 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 25020 25021 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) ); 25022 } 25023 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25024 25025 template <typename Dispatch> setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,Dispatch const & d) const25026 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 
alphaToCoverageEnable, 25027 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25028 { 25029 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25030 d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) ); 25031 } 25032 25033 template <typename Dispatch> setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,Dispatch const & d) const25034 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25035 { 25036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25037 d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) ); 25038 } 25039 25040 template <typename Dispatch> setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,Dispatch const & d) const25041 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25042 { 25043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25044 d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) ); 25045 } 25046 25047 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,Dispatch const & d) const25048 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 25049 uint32_t attachmentCount, 25050 const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, 25051 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25052 { 25053 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25054 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); 25055 } 25056 25057 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25058 template <typename Dispatch> 
setColorBlendEnableEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,Dispatch const & d) const25059 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 25060 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, 25061 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25062 { 25063 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25064 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25065 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && 25066 "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25067 # endif 25068 25069 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) ); 25070 } 25071 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25072 25073 template <typename Dispatch> setColorBlendEquationEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,Dispatch const & d) const25074 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 25075 uint32_t attachmentCount, 25076 const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, 25077 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25078 { 25079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25080 d.vkCmdSetColorBlendEquationEXT( 25081 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); 25082 } 25083 25084 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25085 template <typename Dispatch> 25086 VULKAN_HPP_INLINE void setColorBlendEquationEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,Dispatch 
const & d) const25087 CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 25088 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, 25089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25090 { 25091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25092 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25093 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && 25094 "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25095 # endif 25096 25097 d.vkCmdSetColorBlendEquationEXT( 25098 m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) ); 25099 } 25100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25101 25102 template <typename Dispatch> setColorWriteMaskEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,Dispatch const & d) const25103 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 25104 uint32_t attachmentCount, 25105 const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, 25106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25107 { 25108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25109 d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); 25110 } 25111 25112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25113 template <typename Dispatch> 25114 VULKAN_HPP_INLINE void setColorWriteMaskEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,Dispatch const & d) const25115 CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 25116 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> 
const & colorWriteMasks, 25117 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25118 { 25119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25120 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25121 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && 25122 "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25123 # endif 25124 25125 d.vkCmdSetColorWriteMaskEXT( 25126 m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) ); 25127 } 25128 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25129 25130 template <typename Dispatch> setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,Dispatch const & d) const25131 VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, 25132 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25133 { 25134 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25135 d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); 25136 } 25137 25138 template <typename Dispatch> setRasterizationStreamEXT(uint32_t rasterizationStream,Dispatch const & d) const25139 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25140 { 25141 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25142 d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); 25143 } 25144 25145 template <typename Dispatch> 25146 VULKAN_HPP_INLINE void setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,Dispatch const & d) const25147 CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, 25148 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25149 { 25150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25151 d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); 25152 } 25153 25154 template <typename Dispatch> setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize,Dispatch const & d) const25155 VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, 25156 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25157 { 25158 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25159 d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); 25160 } 25161 25162 template <typename Dispatch> setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,Dispatch const & d) const25163 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25164 { 25165 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25166 d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) ); 25167 } 25168 25169 template <typename Dispatch> setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,Dispatch const & d) const25170 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, 25171 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25172 { 25173 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25174 d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) ); 25175 } 25176 25177 template <typename Dispatch> setColorBlendAdvancedEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,Dispatch 
const & d) const25178 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 25179 uint32_t attachmentCount, 25180 const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, 25181 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25182 { 25183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25184 d.vkCmdSetColorBlendAdvancedEXT( 25185 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); 25186 } 25187 25188 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25189 template <typename Dispatch> 25190 VULKAN_HPP_INLINE void setColorBlendAdvancedEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,Dispatch const & d) const25191 CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 25192 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, 25193 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25194 { 25195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25196 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25197 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && 25198 "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25199 # endif 25200 25201 d.vkCmdSetColorBlendAdvancedEXT( 25202 m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) ); 25203 } 25204 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25205 25206 template <typename Dispatch> setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,Dispatch const & d) const25207 VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, 25208 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 25209 { 25210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25211 d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); 25212 } 25213 25214 template <typename Dispatch> setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,Dispatch const & d) const25215 VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, 25216 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25217 { 25218 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25219 d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); 25220 } 25221 25222 template <typename Dispatch> setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,Dispatch const & d) const25223 VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25224 { 25225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25226 d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) ); 25227 } 25228 25229 template <typename Dispatch> setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,Dispatch const & d) const25230 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, 25231 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25232 { 25233 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25234 d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) ); 25235 } 25236 25237 template <typename Dispatch> setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,Dispatch const & d) const25238 VULKAN_HPP_INLINE void 
CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, 25239 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25240 { 25241 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25242 d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) ); 25243 } 25244 25245 template <typename Dispatch> setViewportSwizzleNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,Dispatch const & d) const25246 VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 25247 uint32_t viewportCount, 25248 const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, 25249 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25250 { 25251 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25252 d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); 25253 } 25254 25255 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25256 template <typename Dispatch> 25257 VULKAN_HPP_INLINE void setViewportSwizzleNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,Dispatch const & d) const25258 CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 25259 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, 25260 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25261 { 25262 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25263 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25264 VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV && 25265 "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25266 # endif 25267 25268 d.vkCmdSetViewportSwizzleNV( 25269 m_commandBuffer, firstViewport, viewportSwizzles.size(), 
reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) ); 25270 } 25271 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25272 25273 template <typename Dispatch> setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,Dispatch const & d) const25274 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, 25275 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25276 { 25277 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25278 d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) ); 25279 } 25280 25281 template <typename Dispatch> setCoverageToColorLocationNV(uint32_t coverageToColorLocation,Dispatch const & d) const25282 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25283 { 25284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25285 d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation ); 25286 } 25287 25288 template <typename Dispatch> setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,Dispatch const & d) const25289 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, 25290 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25291 { 25292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25293 d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); 25294 } 25295 25296 template <typename Dispatch> setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,Dispatch const & d) const25297 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, 25298 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25299 { 25300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25301 d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) ); 25302 } 25303 25304 template <typename Dispatch> setCoverageModulationTableNV(uint32_t coverageModulationTableCount,const float * pCoverageModulationTable,Dispatch const & d) const25305 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, 25306 const float * pCoverageModulationTable, 25307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25308 { 25309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25310 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); 25311 } 25312 25313 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25314 template <typename Dispatch> setCoverageModulationTableNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,Dispatch const & d) const25315 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, 25316 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25317 { 25318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25320 VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && 25321 "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25322 # endif 25323 25324 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); 25325 } 25326 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25327 25328 template <typename Dispatch> setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,Dispatch const & d) const25329 VULKAN_HPP_INLINE void 
CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, 25330 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25331 { 25332 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25333 d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) ); 25334 } 25335 25336 template <typename Dispatch> setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,Dispatch const & d) const25337 VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, 25338 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25339 { 25340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25341 d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) ); 25342 } 25343 25344 template <typename Dispatch> setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,Dispatch const & d) const25345 VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, 25346 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25347 { 25348 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25349 d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); 25350 } 25351 25352 //=== VK_EXT_shader_module_identifier === 25353 25354 template <typename Dispatch> getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const25355 VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 25356 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 25357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
25358 { 25359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25360 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 25361 } 25362 25363 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25364 template <typename Dispatch> 25365 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Dispatch const & d) const25366 Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25367 { 25368 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25369 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25370 VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 25371 # endif 25372 25373 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 25374 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 25375 25376 return identifier; 25377 } 25378 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25379 25380 template <typename Dispatch> getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const25381 VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 25382 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 25383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25384 { 25385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25386 d.vkGetShaderModuleCreateInfoIdentifierEXT( 25387 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( 
pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 25388 } 25389 25390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25391 template <typename Dispatch> 25392 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Dispatch const & d) const25393 Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 25394 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25395 { 25396 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25397 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25398 VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && 25399 "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 25400 # endif 25401 25402 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 25403 d.vkGetShaderModuleCreateInfoIdentifierEXT( 25404 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 25405 25406 return identifier; 25407 } 25408 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25409 25410 //=== VK_NV_optical_flow === 25411 25412 template <typename Dispatch> 25413 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,uint32_t * pFormatCount,VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,Dispatch const & d) const25414 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, 25415 uint32_t * pFormatCount, 25416 VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, 25417 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25418 { 25419 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25420 return static_cast<Result>( 25421 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 25422 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), 25423 pFormatCount, 25424 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); 25425 } 25426 25427 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25428 template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch> 25429 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 25430 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,Dispatch const & d) const25431 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 25432 Dispatch const & d ) const 25433 { 25434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25435 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25436 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 25437 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 25438 # endif 25439 25440 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties; 25441 uint32_t formatCount; 25442 VULKAN_HPP_NAMESPACE::Result result; 25443 do 25444 { 25445 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 25446 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 25447 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 25448 { 25449 imageFormatProperties.resize( formatCount ); 25450 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 25451 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 25452 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 25453 &formatCount, 25454 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 25455 } 25456 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25457 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 25458 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 25459 if ( formatCount < imageFormatProperties.size() ) 25460 { 25461 imageFormatProperties.resize( formatCount ); 25462 } 25463 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 25464 } 25465 25466 template <typename OpticalFlowImageFormatPropertiesNVAllocator, 25467 typename Dispatch, 25468 typename std::enable_if< 25469 std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, 25470 int>::type> 25471 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 25472 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,Dispatch const & d) const25473 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 25474 OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, 25475 Dispatch const & d ) const 25476 { 25477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25478 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25479 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 25480 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 25481 # endif 25482 25483 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( 25484 opticalFlowImageFormatPropertiesNVAllocator ); 25485 uint32_t formatCount; 25486 VULKAN_HPP_NAMESPACE::Result result; 25487 do 25488 { 25489 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 25490 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 25491 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 25492 { 25493 imageFormatProperties.resize( formatCount ); 25494 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25495 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 25496 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 25497 &formatCount, 25498 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 25499 } 25500 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25501 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 25502 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 25503 if ( formatCount < imageFormatProperties.size() ) 25504 { 25505 imageFormatProperties.resize( formatCount ); 25506 } 25507 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 25508 } 25509 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25510 25511 template <typename Dispatch> createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,Dispatch const & d) const25512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, 25513 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25514 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, 25515 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25516 { 25517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25518 return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, 25519 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), 25520 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 25521 reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); 25522 } 25523 25524 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25525 template <typename Dispatch> 25526 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25527 Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 25528 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25529 Dispatch const & d ) const 25530 { 25531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25533 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25534 # endif 25535 25536 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 25537 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 25538 m_device, 25539 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 
25540 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25541 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 25542 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); 25543 25544 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); 25545 } 25546 25547 # ifndef VULKAN_HPP_NO_SMART_HANDLE 25548 template <typename Dispatch> 25549 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25550 Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 25551 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25552 Dispatch const & d ) const 25553 { 25554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25555 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25556 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25557 # endif 25558 25559 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 25560 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 25561 m_device, 25562 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 25563 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25564 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 25565 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); 25566 
25567 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 25568 result, UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 25569 } 25570 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 25571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25572 25573 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25574 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25575 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25576 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25577 { 25578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25579 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25580 } 25581 25582 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25583 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25584 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25585 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25586 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25587 { 25588 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25589 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25590 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25591 # endif 25592 25593 d.vkDestroyOpticalFlowSessionNV( 25594 m_device, 25595 static_cast<VkOpticalFlowSessionNV>( session ), 25596 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25597 } 25598 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25599 25600 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25601 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25602 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25603 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25604 { 25605 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25606 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25607 } 25608 25609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25610 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25611 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25612 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25613 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25614 { 25615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25617 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25618 # endif 25619 25620 d.vkDestroyOpticalFlowSessionNV( 25621 m_device, 25622 static_cast<VkOpticalFlowSessionNV>( session ), 25623 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25624 } 25625 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25626 25627 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 25628 template <typename Dispatch> bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV 
  // VK_NV_optical_flow: bind an image view to a binding point of an optical-flow session.
  // Plain variant (enhanced mode disabled): thin wrapper returning the raw Result.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
                                                                                       VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                                                                                       VULKAN_HPP_NAMESPACE::ImageView view,
                                                                                       VULKAN_HPP_NAMESPACE::ImageLayout layout,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
                                                                   static_cast<VkOpticalFlowSessionNV>( session ),
                                                                   static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
                                                                   static_cast<VkImageView>( view ),
                                                                   static_cast<VkImageLayout>( layout ) ) );
  }
#else
  // Enhanced variant: checks the Result and maps it through createResultValueType
  // (throws vk::SystemError on failure unless exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
                                           VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                                           VULKAN_HPP_NAMESPACE::ImageView view,
                                           VULKAN_HPP_NAMESPACE::ImageLayout layout,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader, the function pointer is only non-null if the extension was loaded.
    VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
                                                                                    static_cast<VkOpticalFlowSessionNV>( session ),
                                                                                    static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
                                                                                    static_cast<VkImageView>( view ),
                                                                                    static_cast<VkImageLayout>( layout ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // VK_NV_optical_flow: record an optical-flow execution into the command buffer (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdOpticalFlowExecuteNV(
      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of opticalFlowExecuteNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" );
# endif

    d.vkCmdOpticalFlowExecuteNV(
      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_maintenance5 ===

  // Bind an index buffer with an explicit bound size (maintenance5 extension of vkCmdBindIndexBuffer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                             VULKAN_HPP_NAMESPACE::IndexType indexType,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
                                static_cast<VkBuffer>( buffer ),
                                static_cast<VkDeviceSize>( offset ),
                                static_cast<VkDeviceSize>( size ),
                                static_cast<VkIndexType>( indexType ) );
  }

  // Query the optimal render-area granularity for a rendering area description (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo,
                                                                 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetRenderingAreaGranularityKHR(
      m_device, reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template
<typename Dispatch>
  // Value-returning variant: returns the granularity directly.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
    Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR &&
                       "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
# endif

    VULKAN_HPP_NAMESPACE::Extent2D granularity;
    d.vkGetRenderingAreaGranularityKHR(
      m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );

    return granularity;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Query the subresource layout of a not-yet-created image described by DeviceImageSubresourceInfo (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo,
                                                               VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Value-returning variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2
    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR &&
                       "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
# endif

    VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout;
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

    return layout;
  }

  // StructureChain variant: the driver fills SubresourceLayout2 plus any extension structures chained by the caller.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR &&
                       "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // The layout element lives inside the chain; the driver writes through it (and its pNext chain).
    VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>();
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Query the subresource layout of an existing image (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
                                                                const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,
                                                                VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2 *>( pSubresource ),
                                       reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Value-returning variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2KHR(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT(
      d.vkGetImageSubresourceLayout2KHR &&
      "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
# endif

    VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout;
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

    return layout;
  }

  // StructureChain variant for chained extension outputs.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT(
      d.vkGetImageSubresourceLayout2KHR &&
      "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>();
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_AMD_anti_lag ===

  // Feed latency data to the AMD Anti-Lag mechanism (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>(
pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of antiLagUpdateAMD.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function <vkAntiLagUpdateAMD> requires <VK_AMD_anti_lag>" );
# endif

    d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( &data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_shader_object ===

  // Create one or more shader objects (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
                                                                          const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                          VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateShadersEXT( m_device,
                                                      createInfoCount,
                                                      reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Vector-returning variant. Returns ResultValue (not ResultValueType) because
  // eIncompatibleShaderBinaryEXT is a non-throwing partial-success code callers must inspect.
  template <typename ShaderEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
    Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            createInfos.size(),
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) );
  }

  // Variant taking a user-supplied allocator for the returned vector.
  template <typename ShaderEXTAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
    Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              ShaderEXTAllocator & shaderEXTAllocator,
                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            createInfos.size(),
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) );
  }

  // Single-shader convenience wrapper around vkCreateShadersEXT with createInfoCount == 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>
    Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    VULKAN_HPP_NAMESPACE::ShaderEXT shader;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            1,
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( &shader ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>( result, std::move( shader ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps each created shader in a UniqueHandle that destroys it on scope exit.
  template <typename Dispatch, typename ShaderEXTAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
    Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    // Raw handles are created first, then moved into UniqueHandles so nothing leaks on partial success.
    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            createInfos.size(),
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
    uniqueShaders.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & shader : shaders )
    {
      uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) );
  }

  // Unique-handle variant taking a user-supplied allocator for the returned vector.
  template <
    typename Dispatch,
    typename ShaderEXTAllocator,
    typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
    Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    ShaderEXTAllocator & shaderEXTAllocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            createInfos.size(),
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
    uniqueShaders.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & shader : shaders )
    {
      uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) );
  }

  // Single-shader unique-handle convenience wrapper.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>
    Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
# endif

    VULKAN_HPP_NAMESPACE::ShaderEXT shader;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShadersEXT( m_device,
                            1,
                            reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkShaderEXT *>( &shader ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Destroy a shader object (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Optional-allocator variant of destroyShaderEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" );
# endif

    d.vkDestroyShaderEXT( m_device,
                          static_cast<VkShaderEXT>( shader ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Overload of the generic destroy() entry point for ShaderEXT handles (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Optional-allocator variant of destroy() for ShaderEXT handles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" );
# endif

    d.vkDestroyShaderEXT( m_device,
                          static_cast<VkShaderEXT>( shader ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Retrieve a shader object's binary data via the usual two-call size-query protocol (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename
Uint8_tAllocator, typename Dispatch>
  // Vector-returning variant: queries the size, then fetches the binary, retrying while the
  // size changes between the two calls (eIncomplete).
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
# endif

    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // The driver may report a smaller final size; shrink to fit.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Variant taking a user-supplied allocator for the returned vector.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
# endif

    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Bind shader objects to pipeline stages; pStages[i] is bound to pShaders[i] (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount,
                                                        const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
                                                        const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindShadersEXT(
      m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant; the stages and shaders arrays must be the same length.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
                                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
# else
    // Size mismatch is a usage error: reported as LogicError when exceptions are enabled.
    if ( stages.size() != shaders.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindShadersEXT( m_commandBuffer,
                           stages.size(),
                           reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
                           reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_pipeline_binary ===

// VK_KHR_pipeline_binary: Device::createPipelineBinariesKHR — the raw-pointer overload
// forwards directly to vkCreatePipelineBinariesKHR; the enhanced overload below returns
// the created handles as a std::vector<PipelineBinaryKHR> inside a ResultValue.
template <typename Dispatch> createPipelineBinariesKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries,Dispatch const & d) const26221 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, 26222 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26223 VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, 26224 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26225 { 26226 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26227 return static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, 26228 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( pCreateInfo ), 26229 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 26230 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( pBinaries ) ) ); 26231 } 26232 26233 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26234 template <typename PipelineBinaryKHRAllocator, typename Dispatch> 26235 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>> createPipelineBinariesKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26236 Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, 26237 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26238 Dispatch const & d ) const 26239 { 26240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26241 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26242 VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); 26243 # endif 26244 26245 
// Enhanced-path body: if createInfo.pKeysAndDataInfo is set the binary count is known up
// front, so the vector is sized once and a single vkCreatePipelineBinariesKHR call fills it;
// otherwise a first call obtains pipelineBinaryCount, then a second call fills the resized
// vector. The '!pipeline ^ !pPipelineCreateInfo' assert requires exactly one of those sources.
std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries; 26246 VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; 26247 VULKAN_HPP_NAMESPACE::Result result; 26248 if ( createInfo.pKeysAndDataInfo ) 26249 { 26250 VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); 26251 pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); 26252 binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; 26253 binaries.pPipelineBinaries = pipelineBinaries.data(); 26254 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26255 m_device, 26256 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26257 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26258 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26259 } 26260 else 26261 { 26262 VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); 26263 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26264 m_device, 26265 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26266 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26267 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26268 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26269 { 26270 pipelineBinaries.resize( binaries.pipelineBinaryCount ); 26271 binaries.pPipelineBinaries = pipelineBinaries.data(); 26272 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26273 m_device, 26274 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26275 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26276 
// End of the second vkCreatePipelineBinariesKHR call. resultCheck accepts eSuccess,
// eIncomplete and ePipelineBinaryMissingKHR as non-throwing outcomes, and the handle
// vector is returned inside a ResultValue so the caller can inspect the partial result.
reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26277 } 26278 } 26279 26280 VULKAN_HPP_NAMESPACE::detail::resultCheck( 26281 result, 26282 VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", 26283 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); 26284 26285 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) ); 26286 } 26287 26288 template <typename PipelineBinaryKHRAllocator, 26289 typename Dispatch, 26290 typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR>::value, int>::type> 26291 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>> createPipelineBinariesKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator,Dispatch const & d) const26292 Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, 26293 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26294 PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, 26295 Dispatch const & d ) const 26296 { 26297 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26298 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26299 VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); 26300 # endif 26301 26302 std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries( pipelineBinaryKHRAllocator ); 26303 VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; 26304 VULKAN_HPP_NAMESPACE::Result result; 26305 if ( 
// Body of the vector-allocator overload — identical control flow to the overload above
// (known-count branch vs. query-then-fill branch), but the result vector was constructed
// from the caller-supplied PipelineBinaryKHRAllocator.
createInfo.pKeysAndDataInfo ) 26306 { 26307 VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); 26308 pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); 26309 binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; 26310 binaries.pPipelineBinaries = pipelineBinaries.data(); 26311 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26312 m_device, 26313 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26314 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26315 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26316 } 26317 else 26318 { 26319 VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); 26320 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26321 m_device, 26322 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26323 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26324 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26325 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26326 { 26327 pipelineBinaries.resize( binaries.pipelineBinaryCount ); 26328 binaries.pPipelineBinaries = pipelineBinaries.data(); 26329 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26330 m_device, 26331 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26332 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26333 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26334 } 26335 } 26336 26337 VULKAN_HPP_NAMESPACE::detail::resultCheck( 26338 result, 26339 VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", 26340 { 
// Wrap-up of the allocator-aware overload, then — under VULKAN_HPP_NO_SMART_HANDLE —
// Device::createPipelineBinariesKHRUnique: the same creation logic, with each returned
// handle subsequently wrapped in a UniqueHandle (see the ObjectDestroy deleter below).
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); 26341 26342 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) ); 26343 } 26344 26345 # ifndef VULKAN_HPP_NO_SMART_HANDLE 26346 template <typename Dispatch, typename PipelineBinaryKHRAllocator> 26347 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>> createPipelineBinariesKHRUnique(const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26348 Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, 26349 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26350 Dispatch const & d ) const 26351 { 26352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26353 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26354 VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); 26355 # endif 26356 26357 std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> pipelineBinaries; 26358 VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; 26359 VULKAN_HPP_NAMESPACE::Result result; 26360 if ( createInfo.pKeysAndDataInfo ) 26361 { 26362 VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); 26363 pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); 26364 binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; 26365 binaries.pPipelineBinaries = pipelineBinaries.data(); 26366 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26367 m_device, 26368 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR 
// Known-count vs. query-then-fill branches of the Unique overload; after resultCheck the
// raw handles are moved into uniquePipelineBinaries, each paired with an ObjectDestroy
// deleter that carries this Device, the allocator and the dispatcher.
*>( &createInfo ), 26369 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26370 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26371 } 26372 else 26373 { 26374 VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); 26375 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26376 m_device, 26377 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26378 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26379 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26380 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26381 { 26382 pipelineBinaries.resize( binaries.pipelineBinaryCount ); 26383 binaries.pPipelineBinaries = pipelineBinaries.data(); 26384 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26385 m_device, 26386 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26387 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26388 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26389 } 26390 } 26391 26392 VULKAN_HPP_NAMESPACE::detail::resultCheck( 26393 result, 26394 VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", 26395 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); 26396 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries; 26397 uniquePipelineBinaries.reserve( pipelineBinaries.size() ); 26398 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 26399 for ( auto const & pipelineBinary : pipelineBinaries ) 26400 { 26401 
// Tail of the default-allocator Unique overload, then the allocator-aware
// createPipelineBinariesKHRUnique: SFINAE-constrained so the supplied allocator's
// value_type must be UniqueHandle<PipelineBinaryKHR, Dispatch>.
uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) ); 26402 } 26403 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>( 26404 result, std::move( uniquePipelineBinaries ) ); 26405 } 26406 26407 template <typename Dispatch, 26408 typename PipelineBinaryKHRAllocator, 26409 typename std::enable_if< 26410 std::is_same<typename PipelineBinaryKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>>::value, 26411 int>::type> 26412 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>> createPipelineBinariesKHRUnique(const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator,Dispatch const & d) const26413 Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, 26414 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26415 PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, 26416 Dispatch const & d ) const 26417 { 26418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26419 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26420 VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); 26421 # endif 26422 26423 std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> pipelineBinaries; 26424 VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; 26425 VULKAN_HPP_NAMESPACE::Result result; 26426 if ( createInfo.pKeysAndDataInfo ) 26427 { 26428 VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); 26429 pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); 26430 
// Same creation flow as the other Unique overload: known-count branch when
// pKeysAndDataInfo is present, otherwise count query followed by a second fill call;
// accepted results are eSuccess, eIncomplete and ePipelineBinaryMissingKHR.
binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; 26431 binaries.pPipelineBinaries = pipelineBinaries.data(); 26432 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26433 m_device, 26434 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26435 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26436 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26437 } 26438 else 26439 { 26440 VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); 26441 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26442 m_device, 26443 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26444 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26445 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26446 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26447 { 26448 pipelineBinaries.resize( binaries.pipelineBinaryCount ); 26449 binaries.pPipelineBinaries = pipelineBinaries.data(); 26450 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR( 26451 m_device, 26452 reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), 26453 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26454 reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); 26455 } 26456 } 26457 26458 VULKAN_HPP_NAMESPACE::detail::resultCheck( 26459 result, 26460 VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", 26461 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); 26462 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, 
// Wrap-up of the allocator-aware Unique overload (the UniqueHandle vector is constructed
// from the caller's allocator), then Device::destroyPipelineBinaryKHR: raw-pointer and
// Optional-allocator overloads, both forwarding to vkDestroyPipelineBinaryKHR.
Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries( 26463 pipelineBinaryKHRAllocator ); 26464 uniquePipelineBinaries.reserve( pipelineBinaries.size() ); 26465 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 26466 for ( auto const & pipelineBinary : pipelineBinaries ) 26467 { 26468 uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) ); 26469 } 26470 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>( 26471 result, std::move( uniquePipelineBinaries ) ); 26472 } 26473 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 26474 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26475 26476 template <typename Dispatch> destroyPipelineBinaryKHR(VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26477 VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, 26478 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26480 { 26481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26482 d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26483 } 26484 26485 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26486 template <typename Dispatch> destroyPipelineBinaryKHR(VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26487 VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, 26488 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26489 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26490 { 26491 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
// Device::destroy(PipelineBinaryKHR) overloads — generated aliases of
// destroyPipelineBinaryKHR so PipelineBinaryKHR handles can also be destroyed through the
// generic Device::destroy family (as used by UniqueHandle deleters).
); 26492 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26493 VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" ); 26494 # endif 26495 26496 d.vkDestroyPipelineBinaryKHR( 26497 m_device, 26498 static_cast<VkPipelineBinaryKHR>( pipelineBinary ), 26499 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26500 } 26501 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26502 26503 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26504 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, 26505 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26507 { 26508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26509 d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26510 } 26511 26512 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26513 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26514 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, 26515 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26516 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26517 { 26518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26519 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26520 VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" ); 26521 # endif 26522 26523 d.vkDestroyPipelineBinaryKHR( 26524 m_device, 
// Tail of the enhanced destroy overload, then Device::getPipelineKeyKHR: the raw-pointer
// overload forwards to vkGetPipelineKeyKHR; the enhanced overload takes the create info as
// an Optional, so a null pointer can be passed through to the C entry point.
26525 static_cast<VkPipelineBinaryKHR>( pipelineBinary ), 26526 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26527 } 26528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26529 26530 template <typename Dispatch> getPipelineKeyKHR(const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo,VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey,Dispatch const & d) const26531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, 26532 VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, 26533 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26534 { 26535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26536 return static_cast<Result>( d.vkGetPipelineKeyKHR( 26537 m_device, reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ), reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) ); 26538 } 26539 26540 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26541 template <typename Dispatch> 26542 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR>::type getPipelineKeyKHR(Optional<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR> pipelineCreateInfo,Dispatch const & d) const26543 Device::getPipelineKeyKHR( Optional<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR> pipelineCreateInfo, Dispatch const & d ) const 26544 { 26545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26546 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26547 VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function <vkGetPipelineKeyKHR> requires <VK_KHR_pipeline_binary>" ); 26548 # endif 26549 26550 VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; 26551 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineKeyKHR( 26552 m_device, 26553 reinterpret_cast<const 
// Pointer conversion of the Optional create info, then Device::getPipelineBinaryDataKHR
// raw-pointer overload: forwards key pointer, size pointer and data pointer unchanged to
// vkGetPipelineBinaryDataKHR (size-query when pPipelineBinaryData is null).
VkPipelineCreateInfoKHR *>( static_cast<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR *>( pipelineCreateInfo ) ), 26554 reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineKey ) ) ); 26555 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); 26556 26557 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) ); 26558 } 26559 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26560 26561 template <typename Dispatch> getPipelineBinaryDataKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo,VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey,size_t * pPipelineBinaryDataSize,void * pPipelineBinaryData,Dispatch const & d) const26562 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, 26563 VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, 26564 size_t * pPipelineBinaryDataSize, 26565 void * pPipelineBinaryData, 26566 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26567 { 26568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26569 return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, 26570 reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( pInfo ), 26571 reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineBinaryKey ), 26572 pPipelineBinaryDataSize, 26573 pPipelineBinaryData ) ); 26574 } 26575 26576 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26577 template <typename Uint8_tAllocator, typename Dispatch> 26578 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 26579 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getPipelineBinaryDataKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info,Dispatch const & d) const26580 Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const 
// Enhanced getPipelineBinaryDataKHR: the first vkGetPipelineBinaryDataKHR call fills the
// key and required byte count (data pointer null), the second fetches the bytes into the
// resized vector; key and data are returned together as a std::pair.
26581 { 26582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26583 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26584 VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" ); 26585 # endif 26586 26587 std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_; 26588 VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; 26589 std::vector<uint8_t, Uint8_tAllocator> & pipelineBinaryData = data_.second; 26590 size_t pipelineBinaryDataSize; 26591 VULKAN_HPP_NAMESPACE::Result result = 26592 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device, 26593 reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), 26594 reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), 26595 &pipelineBinaryDataSize, 26596 nullptr ) ); 26597 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26598 { 26599 pipelineBinaryData.resize( pipelineBinaryDataSize ); 26600 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device, 26601 reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), 26602 reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), 26603 &pipelineBinaryDataSize, 26604 reinterpret_cast<void *>( pipelineBinaryData.data() ) ) ); 26605 } 26606 26607 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); 26608 26609 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 26610 } 26611 26612 template <typename Uint8_tAllocator, 26613 typename Dispatch, 26614 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 26615 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 26616 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type 
// Allocator-aware overload: identical two-call flow; the pair's byte vector is
// piecewise-constructed with the caller-supplied Uint8_tAllocator so no temporary
// default-allocated vector is created.
getPipelineBinaryDataKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const26617 Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, 26618 Uint8_tAllocator & uint8_tAllocator, 26619 Dispatch const & d ) const 26620 { 26621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26622 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26623 VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" ); 26624 # endif 26625 26626 std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( 26627 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 26628 VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; 26629 std::vector<uint8_t, Uint8_tAllocator> & pipelineBinaryData = data_.second; 26630 size_t pipelineBinaryDataSize; 26631 VULKAN_HPP_NAMESPACE::Result result = 26632 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device, 26633 reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), 26634 reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), 26635 &pipelineBinaryDataSize, 26636 nullptr ) ); 26637 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 26638 { 26639 pipelineBinaryData.resize( pipelineBinaryDataSize ); 26640 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device, 26641 reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), 26642 reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), 26643 &pipelineBinaryDataSize, 26644 reinterpret_cast<void *>( pipelineBinaryData.data() ) ) ); 26645 } 26646 26647 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); 26648 26649 return 
// Tail of the allocator-aware overload, then Device::releaseCapturedPipelineDataKHR:
// raw-pointer overload returns the Result; the enhanced overload is void/noexcept and
// deliberately ignores the return of vkReleaseCapturedPipelineDataKHR.
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 26650 } 26651 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26652 26653 template <typename Dispatch> releaseCapturedPipelineDataKHR(const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26654 VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, 26655 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26656 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26657 { 26658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26659 return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( 26660 m_device, reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) ); 26661 } 26662 26663 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26664 template <typename Dispatch> releaseCapturedPipelineDataKHR(const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26665 VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, 26666 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26667 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26668 { 26669 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26670 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26671 VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function <vkReleaseCapturedPipelineDataKHR> requires <VK_KHR_pipeline_binary>" ); 26672 # endif 26673 26674 d.vkReleaseCapturedPipelineDataKHR( 26675 m_device, 26676 reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( &info ), 26677 reinterpret_cast<const VkAllocationCallbacks *>( 
// VK_QCOM_tile_properties: getFramebufferTilePropertiesQCOM raw-pointer overload plus the
// start of the enhanced overload's count/fill enumerate loop (repeats while eIncomplete).
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26678 } 26679 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26680 26681 //=== VK_QCOM_tile_properties === 26682 26683 template <typename Dispatch> getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,uint32_t * pPropertiesCount,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const26684 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 26685 uint32_t * pPropertiesCount, 26686 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 26687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26688 { 26689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26690 return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( 26691 m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 26692 } 26693 26694 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26695 template <typename TilePropertiesQCOMAllocator, typename Dispatch> 26696 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Dispatch const & d) const26697 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const 26698 { 26699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26700 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26701 VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 26702 # endif 26703 26704 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties; 26705 uint32_t propertiesCount; 26706 VULKAN_HPP_NAMESPACE::Result result; 
// Enumerate loop of the enhanced overload: count query, then fill after resize, repeated
// while eIncomplete; the vector is shrunk to propertiesCount and returned directly —
// note there is no resultCheck call in this function, unlike the enumerations above.
26707 do 26708 { 26709 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26710 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 26711 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 26712 { 26713 properties.resize( propertiesCount ); 26714 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 26715 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 26716 } 26717 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26718 26719 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 26720 if ( propertiesCount < properties.size() ) 26721 { 26722 properties.resize( propertiesCount ); 26723 } 26724 return properties; 26725 } 26726 26727 template <typename TilePropertiesQCOMAllocator, 26728 typename Dispatch, 26729 typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type> 26730 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,Dispatch const & d) const26731 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 26732 TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, 26733 Dispatch const & d ) const 26734 { 26735 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26736 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26737 VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 26738 # endif 26739 26740 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, 
// Allocator-aware tile-properties overload (same enumerate loop, vector constructed from
// the caller's allocator), then the raw-pointer getDynamicRenderingTilePropertiesQCOM
// forwarding a RenderingInfo to vkGetDynamicRenderingTilePropertiesQCOM.
TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator ); 26741 uint32_t propertiesCount; 26742 VULKAN_HPP_NAMESPACE::Result result; 26743 do 26744 { 26745 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26746 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 26747 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 26748 { 26749 properties.resize( propertiesCount ); 26750 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 26751 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 26752 } 26753 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26754 26755 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 26756 if ( propertiesCount < properties.size() ) 26757 { 26758 properties.resize( propertiesCount ); 26759 } 26760 return properties; 26761 } 26762 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26763 26764 template <typename Dispatch> getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const26765 VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 26766 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 26767 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26768 { 26769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26770 return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( 26771 m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 26772 } 26773 26774 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26775 template <typename Dispatch> 26776 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const26777 Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26778 { 26779 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26780 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26781 VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 26782 # endif 26783 26784 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; 26785 d.vkGetDynamicRenderingTilePropertiesQCOM( 26786 m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) ); 26787 26788 return properties; 26789 } 26790 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26791 26792 //=== VK_NV_low_latency2 === 26793 26794 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,Dispatch const & d) const26795 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26796 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, 26797 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26798 { 26799 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26800 return static_cast<Result>( 26801 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); 26802 } 26803 26804 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26805 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,Dispatch const & d) 
const26806 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26807 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, 26808 Dispatch const & d ) const 26809 { 26810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26811 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26812 VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" ); 26813 # endif 26814 26815 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26816 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) ); 26817 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); 26818 26819 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 26820 } 26821 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26822 26823 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,Dispatch const & d) const26824 VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26825 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, 26826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26827 { 26828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26829 return static_cast<Result>( 26830 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); 26831 } 26832 26833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26834 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo,Dispatch const & d) const26835 VULKAN_HPP_INLINE void Device::latencySleepNV( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26836 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, 26837 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26838 { 26839 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26840 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26841 VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" ); 26842 # endif 26843 26844 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ); 26845 } 26846 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26847 26848 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const26849 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26850 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, 26851 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26852 { 26853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26854 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 26855 } 26856 26857 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26858 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,Dispatch const & d) const26859 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26860 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, 26861 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26862 { 26863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26864 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26865 VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function 
<vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" ); 26866 # endif 26867 26868 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 26869 } 26870 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26871 26872 template <typename Dispatch> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const26873 VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26874 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, 26875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26876 { 26877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26878 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 26879 } 26880 26881 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26882 template <typename LatencyTimingsFrameReportNVAllocator, typename Dispatch> 26883 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const26884 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 26885 { 26886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26887 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26888 VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); 26889 # endif 26890 26891 std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings; 26892 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 26893 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), 
reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 26894 timings.resize( latencyMarkerInfo.timingCount ); 26895 latencyMarkerInfo.pTimings = timings.data(); 26896 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 26897 26898 return timings; 26899 } 26900 26901 template < 26902 typename LatencyTimingsFrameReportNVAllocator, 26903 typename Dispatch, 26904 typename std::enable_if<std::is_same<typename LatencyTimingsFrameReportNVAllocator::value_type, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>::value, 26905 int>::type> 26906 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator,Dispatch const & d) const26907 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 26908 LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, 26909 Dispatch const & d ) const 26910 { 26911 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26912 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26913 VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); 26914 # endif 26915 26916 std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings( latencyTimingsFrameReportNVAllocator ); 26917 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 26918 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 26919 timings.resize( latencyMarkerInfo.timingCount ); 26920 latencyMarkerInfo.pTimings = timings.data(); 26921 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), 
reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 26922 26923 return timings; 26924 } 26925 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26926 26927 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,Dispatch const & d) const26928 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, 26929 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26930 { 26931 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26932 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); 26933 } 26934 26935 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26936 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,Dispatch const & d) const26937 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, 26938 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26939 { 26940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26942 VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" ); 26943 # endif 26944 26945 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); 26946 } 26947 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26948 26949 //=== VK_KHR_cooperative_matrix === 26950 26951 template <typename Dispatch> getCooperativeMatrixPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties,Dispatch const & d) const26952 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( 26953 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 26954 { 26955 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26956 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 26957 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); 26958 } 26959 26960 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26961 template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch> 26962 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 26963 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(Dispatch const & d) const26964 PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const 26965 { 26966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26967 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26968 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 26969 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 26970 # endif 26971 26972 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; 26973 uint32_t propertyCount; 26974 VULKAN_HPP_NAMESPACE::Result result; 26975 do 26976 { 26977 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 26978 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 26979 { 26980 properties.resize( propertyCount ); 26981 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 26982 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 26983 } 26984 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26985 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 26986 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 26987 if ( propertyCount < properties.size() ) 26988 { 26989 properties.resize( propertyCount ); 26990 } 26991 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 26992 } 26993 26994 template <typename CooperativeMatrixPropertiesKHRAllocator, 26995 typename Dispatch, 26996 typename std::enable_if< 26997 std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, 26998 int>::type> 26999 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 27000 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,Dispatch const & d) const27001 PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, 27002 Dispatch const & d ) const 27003 { 27004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27006 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 27007 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 27008 # endif 27009 27010 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( 27011 cooperativeMatrixPropertiesKHRAllocator ); 27012 uint32_t propertyCount; 27013 VULKAN_HPP_NAMESPACE::Result result; 27014 do 27015 { 27016 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 27017 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
propertyCount ) 27018 { 27019 properties.resize( propertyCount ); 27020 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 27021 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 27022 } 27023 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 27024 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 27025 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 27026 if ( propertyCount < properties.size() ) 27027 { 27028 properties.resize( propertyCount ); 27029 } 27030 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 27031 } 27032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27033 27034 //=== VK_EXT_attachment_feedback_loop_dynamic_state === 27035 27036 template <typename Dispatch> setAttachmentFeedbackLoopEnableEXT(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,Dispatch const & d) const27037 VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, 27038 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27039 { 27040 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27041 d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) ); 27042 } 27043 27044 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 27045 //=== VK_QNX_external_memory_screen_buffer === 27046 27047 template <typename Dispatch> getScreenBufferPropertiesQNX(const struct _screen_buffer * buffer,VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,Dispatch const & d) const27048 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, 27049 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, 27050 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 27051 { 27052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27053 return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); 27054 } 27055 27056 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27057 template <typename Dispatch> 27058 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const27059 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 27060 { 27061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27063 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 27064 # endif 27065 27066 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; 27067 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27068 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 27069 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 27070 27071 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 27072 } 27073 27074 template <typename X, typename Y, typename... 
Z, typename Dispatch> 27075 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const27076 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 27077 { 27078 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27079 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27080 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 27081 # endif 27082 27083 StructureChain<X, Y, Z...> structureChain; 27084 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>(); 27085 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27086 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 27087 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 27088 27089 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 27090 } 27091 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27092 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 27093 27094 //=== VK_KHR_line_rasterization === 27095 27096 template <typename Dispatch> 27097 VULKAN_HPP_INLINE void setLineStippleKHR(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const27098 CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27099 { 27100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27101 d.vkCmdSetLineStippleKHR( m_commandBuffer, lineStippleFactor, lineStipplePattern ); 27102 } 27103 27104 //=== 
VK_KHR_calibrated_timestamps === 27105 27106 template <typename Dispatch> getCalibrateableTimeDomainsKHR(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const27107 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, 27108 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 27109 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27110 { 27111 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27112 return static_cast<Result>( 27113 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 27114 } 27115 27116 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27117 template <typename TimeDomainKHRAllocator, typename Dispatch> 27118 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(Dispatch const & d) const27119 PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const 27120 { 27121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27122 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27123 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 27124 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27125 # endif 27126 27127 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 27128 uint32_t timeDomainCount; 27129 VULKAN_HPP_NAMESPACE::Result result; 27130 do 27131 { 27132 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 27133 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 27134 { 27135 timeDomains.resize( timeDomainCount ); 27136 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 27137 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 27138 } 27139 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 27140 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 27141 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 27142 if ( timeDomainCount < timeDomains.size() ) 27143 { 27144 timeDomains.resize( timeDomainCount ); 27145 } 27146 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 27147 } 27148 27149 template <typename TimeDomainKHRAllocator, 27150 typename Dispatch, 27151 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 27152 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const27153 PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 27154 { 27155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27156 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27157 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 27158 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27159 # endif 27160 27161 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 27162 uint32_t timeDomainCount; 27163 VULKAN_HPP_NAMESPACE::Result result; 27164 do 27165 { 27166 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 27167 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 27168 { 27169 timeDomains.resize( timeDomainCount ); 27170 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27171 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 27172 } 27173 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 27174 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 27175 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 27176 if ( timeDomainCount < timeDomains.size() ) 27177 { 27178 timeDomains.resize( timeDomainCount ); 27179 } 27180 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 27181 } 27182 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27183 27184 template <typename Dispatch> getCalibratedTimestampsKHR(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const27185 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, 27186 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 27187 uint64_t * pTimestamps, 27188 uint64_t * pMaxDeviation, 27189 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27190 { 27191 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27192 return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( 27193 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 27194 } 27195 27196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27197 template <typename Uint64_tAllocator, typename Dispatch> 27198 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const27199 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 27200 Dispatch const & d ) const 27201 { 27202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27203 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27204 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 27205 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27206 # endif 27207 27208 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 27209 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 27210 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 27211 uint64_t & maxDeviation = data_.second; 27212 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 27213 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 27214 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 27215 27216 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 27217 } 27218 27219 template <typename Uint64_tAllocator, 27220 typename Dispatch, 27221 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 27222 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type 
getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const27223 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 27224 Uint64_tAllocator & uint64_tAllocator, 27225 Dispatch const & d ) const 27226 { 27227 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27228 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27229 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 27230 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27231 # endif 27232 27233 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 27234 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 27235 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 27236 uint64_t & maxDeviation = data_.second; 27237 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 27238 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 27239 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 27240 27241 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 27242 } 27243 27244 template <typename Dispatch> 27245 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampKHR(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const27246 Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, 
Dispatch const & d ) const 27247 { 27248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27249 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27250 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 27251 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27252 # endif 27253 27254 std::pair<uint64_t, uint64_t> data_; 27255 uint64_t & timestamp = data_.first; 27256 uint64_t & maxDeviation = data_.second; 27257 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27258 d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 27259 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); 27260 27261 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 27262 } 27263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27264 27265 //=== VK_KHR_maintenance6 === 27266 27267 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo,Dispatch const & d) const27268 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, 27269 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27270 { 27271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27272 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) ); 27273 } 27274 27275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27276 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo,Dispatch const & d) const27277 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & 
bindDescriptorSetsInfo, 27278 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27279 { 27280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27282 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 27283 # endif 27284 27285 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) ); 27286 } 27287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27288 27289 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo,Dispatch const & d) const27290 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, 27291 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27292 { 27293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27294 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) ); 27295 } 27296 27297 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27298 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo,Dispatch const & d) const27299 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, 27300 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27301 { 27302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27303 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27304 VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 27305 # endif 27306 27307 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) ); 27308 } 27309 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27310 27311 
template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo,Dispatch const & d) const27312 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, 27313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27314 { 27315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27316 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) ); 27317 } 27318 27319 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27320 template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo,Dispatch const & d) const27321 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, 27322 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27323 { 27324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27325 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27326 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 27327 # endif 27328 27329 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) ); 27330 } 27331 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27332 27333 template <typename Dispatch> 27334 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo,Dispatch const & d) const27335 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, 27336 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27337 { 27338 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27339 
d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, 27340 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) ); 27341 } 27342 27343 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27344 template <typename Dispatch> 27345 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo,Dispatch const & d) const27346 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, 27347 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27348 { 27349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27350 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27351 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && 27352 "Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 27353 # endif 27354 27355 d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, 27356 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) ); 27357 } 27358 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27359 27360 template <typename Dispatch> 27361 VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo,Dispatch const & d) const27362 CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, 27363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27364 { 27365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27366 d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); 27367 } 27368 27369 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27370 template <typename Dispatch> 27371 
VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo,Dispatch const & d) const27372 CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, 27373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27374 { 27375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27376 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27377 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" ); 27378 # endif 27379 27380 d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) ); 27381 } 27382 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27383 27384 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const27385 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 27386 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, 27387 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27388 { 27389 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27390 d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 27391 m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); 27392 } 27393 27394 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27395 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const27396 VULKAN_HPP_INLINE void 
CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 27397 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, 27398 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27399 { 27400 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27401 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27402 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && 27403 "Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" ); 27404 # endif 27405 27406 d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 27407 m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) ); 27408 } 27409 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27410 27411 } // namespace VULKAN_HPP_NAMESPACE 27412 #endif 27413