1 // Copyright 2015-2024 The Khronos Group Inc. 2 // 3 // SPDX-License-Identifier: Apache-2.0 OR MIT 4 // 5 6 // This header is generated from the Khronos Vulkan XML API Registry. 7 8 #ifndef VULKAN_FUNCS_HPP 9 #define VULKAN_FUNCS_HPP 10 11 // include-what-you-use: make sure, vulkan.hpp is used by code-completers 12 // IWYU pragma: private; include "vulkan.hpp" 13 14 namespace VULKAN_HPP_NAMESPACE 15 { 16 17 //=========================== 18 //=== COMMAND Definitions === 19 //=========================== 20 21 //=== VK_VERSION_1_0 === 22 23 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Instance * pInstance,Dispatch const & d)24 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, 25 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26 VULKAN_HPP_NAMESPACE::Instance * pInstance, 27 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 28 { 29 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 30 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), 31 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 32 reinterpret_cast<VkInstance *>( pInstance ) ) ); 33 } 34 35 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 36 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)37 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( 38 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 39 { 40 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 41 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 42 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 43 # endif 44 45 VULKAN_HPP_NAMESPACE::Instance instance; 46 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 47 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 48 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 49 reinterpret_cast<VkInstance *>( &instance ) ) ); 50 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); 51 52 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); 53 } 54 55 # ifndef VULKAN_HPP_NO_SMART_HANDLE 56 template <typename Dispatch> createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)57 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( 58 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 59 { 60 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 61 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 62 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 63 # endif 64 65 VULKAN_HPP_NAMESPACE::Instance instance; 66 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 67 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 68 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 69 reinterpret_cast<VkInstance *>( &instance ) ) ); 70 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); 71 72 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 73 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, detail::ObjectDestroy<detail::NoParent, Dispatch>( allocator, d ) ) ); 74 } 75 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 76 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 77 78 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const79 VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 80 { 81 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 82 d.vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 83 } 84 85 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 86 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const87 VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 88 { 89 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 90 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 91 VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function <vkDestroyInstance> requires <VK_VERSION_1_0>" ); 92 # endif 93 94 d.vkDestroyInstance( m_instance, 95 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 96 } 97 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 98 99 template <typename Dispatch> enumeratePhysicalDevices(uint32_t * pPhysicalDeviceCount,VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,Dispatch const & d) const100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, 101 
VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, 102 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 103 { 104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 105 return static_cast<Result>( 106 d.vkEnumeratePhysicalDevices( static_cast<VkInstance>( m_instance ), pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); 107 } 108 109 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 110 template <typename PhysicalDeviceAllocator, 111 typename Dispatch, 112 typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type> 113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(Dispatch const & d) const114 Instance::enumeratePhysicalDevices( Dispatch const & d ) const 115 { 116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 118 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 119 # endif 120 121 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; 122 uint32_t physicalDeviceCount; 123 VULKAN_HPP_NAMESPACE::Result result; 124 do 125 { 126 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 127 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 128 { 129 physicalDevices.resize( physicalDeviceCount ); 130 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 131 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 132 } 133 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 134 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::enumeratePhysicalDevices" ); 135 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 136 if ( physicalDeviceCount < physicalDevices.size() ) 137 { 138 physicalDevices.resize( physicalDeviceCount ); 139 } 140 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 141 } 142 143 template <typename PhysicalDeviceAllocator, 144 typename Dispatch, 145 typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type> 146 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(PhysicalDeviceAllocator & physicalDeviceAllocator,Dispatch const & d) const147 Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const 148 { 149 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 150 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 151 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 152 # endif 153 154 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); 155 uint32_t physicalDeviceCount; 156 VULKAN_HPP_NAMESPACE::Result result; 157 do 158 { 159 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 160 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 161 { 162 physicalDevices.resize( physicalDeviceCount ); 163 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 164 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 165 } 166 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 167 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 168 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 169 if ( physicalDeviceCount < physicalDevices.size() ) 170 { 171 physicalDevices.resize( physicalDeviceCount ); 172 } 173 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 174 } 175 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 176 177 template <typename Dispatch> getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,Dispatch const & d) const178 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 179 { 180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 181 d.vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); 182 } 183 184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 185 template <typename Dispatch> 186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const & d) const187 PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 188 { 189 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 190 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 191 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" ); 192 # endif 193 194 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; 195 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); 196 197 return features; 198 } 199 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 200 201 template <typename Dispatch> getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,Dispatch const & d) const202 
VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 203 VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, 204 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 205 { 206 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 207 d.vkGetPhysicalDeviceFormatProperties( 208 static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); 209 } 210 211 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 212 template <typename Dispatch> 213 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const214 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 215 { 216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 218 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" ); 219 # endif 220 221 VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; 222 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); 223 224 return formatProperties; 225 } 226 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 227 228 template <typename Dispatch> getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,Dispatch const & d) const229 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 230 VULKAN_HPP_NAMESPACE::ImageType type, 231 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, 232 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 233 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 234 VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, 235 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 236 { 237 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 238 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 239 static_cast<VkFormat>( format ), 240 static_cast<VkImageType>( type ), 241 static_cast<VkImageTiling>( tiling ), 242 static_cast<VkImageUsageFlags>( usage ), 243 static_cast<VkImageCreateFlags>( flags ), 244 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); 245 } 246 247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 248 template <typename Dispatch> 249 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,Dispatch const & d) const250 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 251 VULKAN_HPP_NAMESPACE::ImageType type, 252 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 253 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 254 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 255 Dispatch const & d ) const 256 { 257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 259 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" ); 260 # endif 261 262 VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; 263 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 264 
d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 265 static_cast<VkFormat>( format ), 266 static_cast<VkImageType>( type ), 267 static_cast<VkImageTiling>( tiling ), 268 static_cast<VkImageUsageFlags>( usage ), 269 static_cast<VkImageCreateFlags>( flags ), 270 reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); 271 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); 272 273 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 274 } 275 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 276 277 template <typename Dispatch> getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,Dispatch const & d) const278 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, 279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 280 { 281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 282 d.vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); 283 } 284 285 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 286 template <typename Dispatch> 287 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const & d) const288 PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 289 { 290 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 291 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 292 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" ); 293 # endif 294 295 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; 296 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); 297 298 return properties; 299 } 300 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 301 302 template <typename Dispatch> getQueueFamilyProperties(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,Dispatch const & d) const303 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, 304 VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, 305 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 306 { 307 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 308 d.vkGetPhysicalDeviceQueueFamilyProperties( 309 static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); 310 } 311 312 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 313 template < 314 typename QueueFamilyPropertiesAllocator, 315 typename Dispatch, 316 typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type> 317 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(Dispatch const & d) const318 PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const 319 { 320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 321 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 322 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 323 # endif 324 325 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; 326 uint32_t queueFamilyPropertyCount; 327 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 328 queueFamilyProperties.resize( queueFamilyPropertyCount ); 329 d.vkGetPhysicalDeviceQueueFamilyProperties( 330 m_physicalDevice, 
&queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 331 332 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 333 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 334 { 335 queueFamilyProperties.resize( queueFamilyPropertyCount ); 336 } 337 return queueFamilyProperties; 338 } 339 340 template < 341 typename QueueFamilyPropertiesAllocator, 342 typename Dispatch, 343 typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type> 344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,Dispatch const & d) const345 PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const 346 { 347 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 348 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 349 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 350 # endif 351 352 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); 353 uint32_t queueFamilyPropertyCount; 354 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 355 queueFamilyProperties.resize( queueFamilyPropertyCount ); 356 d.vkGetPhysicalDeviceQueueFamilyProperties( 357 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 358 359 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 360 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 361 { 362 
queueFamilyProperties.resize( queueFamilyPropertyCount ); 363 } 364 return queueFamilyProperties; 365 } 366 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 367 368 template <typename Dispatch> getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,Dispatch const & d) const369 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, 370 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 371 { 372 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 373 d.vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 374 reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); 375 } 376 377 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 378 template <typename Dispatch> 379 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const & d) const380 PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 381 { 382 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 383 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 384 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" ); 385 # endif 386 387 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; 388 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); 389 390 return memoryProperties; 391 } 392 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 393 394 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const395 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 396 { 397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 398 return d.vkGetInstanceProcAddr( 
static_cast<VkInstance>( m_instance ), pName ); 399 } 400 401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 402 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const403 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 404 { 405 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 406 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 407 VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" ); 408 # endif 409 410 PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); 411 412 return result; 413 } 414 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 415 416 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const417 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 418 { 419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 420 return d.vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), pName ); 421 } 422 423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 424 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const425 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 426 { 427 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 428 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 429 VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" ); 430 # endif 431 432 PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); 433 434 return result; 435 } 436 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 437 438 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::Device * pDevice,Dispatch const & d) const439 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, 440 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 441 VULKAN_HPP_NAMESPACE::Device * pDevice, 442 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 443 { 444 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 445 return static_cast<Result>( d.vkCreateDevice( static_cast<VkPhysicalDevice>( m_physicalDevice ), 446 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), 447 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 448 reinterpret_cast<VkDevice *>( pDevice ) ) ); 449 } 450 451 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 452 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const453 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( 454 const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 455 { 456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 457 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 458 VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 459 # endif 460 461 VULKAN_HPP_NAMESPACE::Device device; 462 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 463 d.vkCreateDevice( m_physicalDevice, 464 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 465 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 466 reinterpret_cast<VkDevice *>( &device ) ) ); 467 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); 468 469 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); 470 } 471 472 # ifndef VULKAN_HPP_NO_SMART_HANDLE 473 template <typename Dispatch> 474 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const475 PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, 476 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 477 Dispatch const & d ) const 478 { 479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 480 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 481 VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 482 # endif 483 484 VULKAN_HPP_NAMESPACE::Device device; 485 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 486 d.vkCreateDevice( m_physicalDevice, 487 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 488 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 489 reinterpret_cast<VkDevice *>( &device ) ) ); 490 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); 491 492 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 493 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, detail::ObjectDestroy<detail::NoParent, Dispatch>( allocator, d ) ) ); 494 } 495 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 496 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 497 498 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const499 VULKAN_HPP_INLINE 
void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 500 { 501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 502 d.vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 503 } 504 505 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 506 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const507 VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 508 { 509 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 510 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 511 VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function <vkDestroyDevice> requires <VK_VERSION_1_0>" ); 512 # endif 513 514 d.vkDestroyDevice( m_device, 515 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 516 } 517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 518 519 template <typename Dispatch> enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d)520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, 521 uint32_t * pPropertyCount, 522 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 523 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 524 { 525 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 526 return static_cast<Result>( 527 d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 528 } 529 530 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 531 template < 532 typename ExtensionPropertiesAllocator, 533 typename Dispatch, 534 typename 
std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 535 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d)536 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) 537 { 538 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 539 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 540 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); 541 # endif 542 543 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 544 uint32_t propertyCount; 545 VULKAN_HPP_NAMESPACE::Result result; 546 do 547 { 548 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 549 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 550 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 551 { 552 properties.resize( propertyCount ); 553 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 554 layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 555 } 556 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 557 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 558 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 559 if ( propertyCount < properties.size() ) 560 { 561 properties.resize( propertyCount ); 562 } 563 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 564 } 565 566 template < 567 typename ExtensionPropertiesAllocator, 568 typename Dispatch, 569 typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 570 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d)571 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, 572 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 573 Dispatch const & d ) 574 { 575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 576 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 577 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); 578 # endif 579 580 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 581 uint32_t propertyCount; 582 VULKAN_HPP_NAMESPACE::Result result; 583 do 584 { 585 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 586 d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 587 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 588 { 589 properties.resize( propertyCount ); 590 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 591 layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 592 } 593 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 594 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 595 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 596 if ( propertyCount < properties.size() ) 597 { 598 properties.resize( propertyCount ); 599 } 600 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 601 } 602 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 603 604 template <typename Dispatch> enumerateDeviceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d) const605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, 606 uint32_t * pPropertyCount, 607 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 608 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 609 { 610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 611 return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( 612 static_cast<VkPhysicalDevice>( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 613 } 614 615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 616 template < 617 typename ExtensionPropertiesAllocator, 618 typename Dispatch, 619 typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 620 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d) const621 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const 622 { 623 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 624 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 625 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); 626 # endif 627 628 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 629 uint32_t propertyCount; 630 VULKAN_HPP_NAMESPACE::Result result; 631 do 632 { 633 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 634 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 635 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 636 { 637 properties.resize( propertyCount ); 638 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 639 m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 640 } 641 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 642 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 643 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 644 if ( propertyCount < properties.size() ) 645 { 646 properties.resize( propertyCount ); 647 } 648 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 649 } 650 651 template < 652 typename ExtensionPropertiesAllocator, 653 typename Dispatch, 654 typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 655 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d) const656 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, 657 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 658 Dispatch const & d ) const 659 { 660 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 661 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 662 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); 663 # endif 664 665 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 666 uint32_t propertyCount; 667 VULKAN_HPP_NAMESPACE::Result result; 668 do 669 { 670 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 671 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 672 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 673 { 674 properties.resize( propertyCount ); 675 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 676 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 677 } 678 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 679 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 680 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 681 if ( propertyCount < properties.size() ) 682 { 683 properties.resize( propertyCount ); 684 } 685 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 686 } 687 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 688 689 template <typename Dispatch> enumerateInstanceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d)690 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, 691 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 692 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 693 { 694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 695 return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 696 } 697 698 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 699 template <typename LayerPropertiesAllocator, 700 typename Dispatch, 701 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 702 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type 
enumerateInstanceLayerProperties(Dispatch const & d)703 enumerateInstanceLayerProperties( Dispatch const & d ) 704 { 705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 707 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); 708 # endif 709 710 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 711 uint32_t propertyCount; 712 VULKAN_HPP_NAMESPACE::Result result; 713 do 714 { 715 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 716 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 717 { 718 properties.resize( propertyCount ); 719 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 720 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 721 } 722 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 723 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 724 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 725 if ( propertyCount < properties.size() ) 726 { 727 properties.resize( propertyCount ); 728 } 729 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 730 } 731 732 template <typename LayerPropertiesAllocator, 733 typename Dispatch, 734 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 735 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d)736 enumerateInstanceLayerProperties( 
LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) 737 { 738 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 739 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 740 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); 741 # endif 742 743 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 744 uint32_t propertyCount; 745 VULKAN_HPP_NAMESPACE::Result result; 746 do 747 { 748 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 749 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 750 { 751 properties.resize( propertyCount ); 752 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 753 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 754 } 755 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 756 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 757 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 758 if ( propertyCount < properties.size() ) 759 { 760 properties.resize( propertyCount ); 761 } 762 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 763 } 764 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 765 766 template <typename Dispatch> enumerateDeviceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d) const767 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, 768 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 769 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 770 { 771 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 772 return 
static_cast<Result>( d.vkEnumerateDeviceLayerProperties( 773 static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 774 } 775 776 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 777 template <typename LayerPropertiesAllocator, 778 typename Dispatch, 779 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 780 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(Dispatch const & d) const781 PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const 782 { 783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 784 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 785 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); 786 # endif 787 788 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 789 uint32_t propertyCount; 790 VULKAN_HPP_NAMESPACE::Result result; 791 do 792 { 793 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 794 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 795 { 796 properties.resize( propertyCount ); 797 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 798 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 799 } 800 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 801 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 802 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 803 if ( propertyCount < properties.size() ) 804 { 805 
properties.resize( propertyCount ); 806 } 807 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 808 } 809 810 template <typename LayerPropertiesAllocator, 811 typename Dispatch, 812 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 813 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d) const814 PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const 815 { 816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 818 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); 819 # endif 820 821 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 822 uint32_t propertyCount; 823 VULKAN_HPP_NAMESPACE::Result result; 824 do 825 { 826 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 827 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 828 { 829 properties.resize( propertyCount ); 830 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 831 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 832 } 833 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 834 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 835 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 836 if ( 
propertyCount < properties.size() ) 837 { 838 properties.resize( propertyCount ); 839 } 840 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 841 } 842 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 843 844 template <typename Dispatch> 845 VULKAN_HPP_INLINE void getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const846 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 847 { 848 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 849 d.vkGetDeviceQueue( static_cast<VkDevice>( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); 850 } 851 852 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 853 template <typename Dispatch> 854 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,Dispatch const & d) const855 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 856 { 857 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 858 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 859 VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function <vkGetDeviceQueue> requires <VK_VERSION_1_0>" ); 860 # endif 861 862 VULKAN_HPP_NAMESPACE::Queue queue; 863 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); 864 865 return queue; 866 } 867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 868 869 template <typename Dispatch> submit(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, 871 const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, 872 VULKAN_HPP_NAMESPACE::Fence fence, 873 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 874 { 875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 876 return static_cast<Result>( 877 d.vkQueueSubmit( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 878 } 879 880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 881 template <typename Dispatch> submit(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const882 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( 883 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 884 { 885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 886 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 887 VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" ); 888 # endif 889 890 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 891 d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 892 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); 893 894 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 895 } 896 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 897 898 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 899 template <typename Dispatch> waitIdle(Dispatch const & d) const900 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 901 { 902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 903 return static_cast<Result>( d.vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) ); 904 } 905 #else 906 template <typename Dispatch> waitIdle(Dispatch const & d) const907 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const 908 { 909 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 910 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 911 VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" ); 912 # endif 913 914 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueueWaitIdle( m_queue ) ); 915 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); 916 917 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 918 } 919 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 920 921 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 922 template <typename Dispatch> waitIdle(Dispatch const & d) const923 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 924 { 925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 926 return static_cast<Result>( d.vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) ); 927 } 928 #else 929 template <typename Dispatch> waitIdle(Dispatch const & d) const930 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const 931 { 932 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 933 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 934 VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" ); 935 # endif 936 937 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeviceWaitIdle( m_device ) ); 938 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); 939 940 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 941 } 942 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 943 944 template <typename 
Dispatch> allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,Dispatch const & d) const945 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, 946 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 947 VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, 948 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 949 { 950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 951 return static_cast<Result>( d.vkAllocateMemory( static_cast<VkDevice>( m_device ), 952 reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), 953 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 954 reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); 955 } 956 957 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 958 template <typename Dispatch> 959 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const960 Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 961 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 962 Dispatch const & d ) const 963 { 964 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 965 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 966 VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); 967 # endif 968 969 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 970 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 971 d.vkAllocateMemory( m_device, 972 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 973 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 974 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 975 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); 976 977 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); 978 } 979 980 # ifndef VULKAN_HPP_NO_SMART_HANDLE 981 template <typename Dispatch> 982 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const983 Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 984 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 985 Dispatch const & d ) const 986 { 987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 988 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 989 VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); 990 # endif 991 992 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 993 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 994 d.vkAllocateMemory( m_device, 995 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 996 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 997 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 998 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); 999 1000 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1001 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, detail::ObjectFree<Device, Dispatch>( *this, allocator, d ) ) ); 1002 } 1003 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1004 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1005 1006 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1007 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1008 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1009 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1010 { 1011 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1012 d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1013 } 1014 1015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1016 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1017 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1018 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1019 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1020 { 1021 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1022 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1023 VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); 1024 # endif 1025 1026 d.vkFreeMemory( m_device, 1027 static_cast<VkDeviceMemory>( memory ), 1028 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1029 } 1030 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1031 1032 template <typename Dispatch> 1033 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1034 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1035 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1036 { 1037 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1038 d.vkFreeMemory( static_cast<VkDevice>( 
m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1039 } 1040 1041 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1042 template <typename Dispatch> 1043 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1044 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1045 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1046 { 1047 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1048 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1049 VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); 1050 # endif 1051 1052 d.vkFreeMemory( m_device, 1053 static_cast<VkDeviceMemory>( memory ), 1054 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1055 } 1056 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1057 1058 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,void ** ppData,Dispatch const & d) const1059 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1060 VULKAN_HPP_NAMESPACE::DeviceSize offset, 1061 VULKAN_HPP_NAMESPACE::DeviceSize size, 1062 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 1063 void ** ppData, 1064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1065 { 1066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1067 return static_cast<Result>( d.vkMapMemory( static_cast<VkDevice>( m_device ), 1068 static_cast<VkDeviceMemory>( memory ), 1069 static_cast<VkDeviceSize>( offset ), 1070 static_cast<VkDeviceSize>( size ), 1071 static_cast<VkMemoryMapFlags>( flags ), 1072 ppData ) ); 1073 } 1074 1075 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1076 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory 
memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,Dispatch const & d) const1077 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1078 VULKAN_HPP_NAMESPACE::DeviceSize offset, 1079 VULKAN_HPP_NAMESPACE::DeviceSize size, 1080 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 1081 Dispatch const & d ) const 1082 { 1083 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1084 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1085 VULKAN_HPP_ASSERT( d.vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" ); 1086 # endif 1087 1088 void * pData; 1089 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory( m_device, 1090 static_cast<VkDeviceMemory>( memory ), 1091 static_cast<VkDeviceSize>( offset ), 1092 static_cast<VkDeviceSize>( size ), 1093 static_cast<VkMemoryMapFlags>( flags ), 1094 &pData ) ); 1095 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); 1096 1097 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 1098 } 1099 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1100 1101 template <typename Dispatch> unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1102 VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1103 { 1104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1105 d.vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ) ); 1106 } 1107 1108 template <typename Dispatch> flushMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const1109 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( 
uint32_t memoryRangeCount, 1110 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 1111 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1112 { 1113 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1114 return static_cast<Result>( 1115 d.vkFlushMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 1116 } 1117 1118 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1119 template <typename Dispatch> 1120 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type flushMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1121 Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1122 Dispatch const & d ) const 1123 { 1124 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1125 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1126 VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" ); 1127 # endif 1128 1129 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1130 d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1131 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); 1132 1133 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1134 } 1135 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1136 1137 template <typename Dispatch> invalidateMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const1138 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, 1139 const 
VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 1140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1141 { 1142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1143 return static_cast<Result>( 1144 d.vkInvalidateMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 1145 } 1146 1147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1148 template <typename Dispatch> 1149 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type invalidateMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1150 Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1151 Dispatch const & d ) const 1152 { 1153 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1154 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1155 VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" ); 1156 # endif 1157 1158 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1159 d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1160 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); 1161 1162 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1163 } 1164 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1165 1166 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,Dispatch const & d) const1167 VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1168 
VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, 1169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1170 { 1171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1172 d.vkGetDeviceMemoryCommitment( 1173 static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); 1174 } 1175 1176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1177 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1179 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1180 { 1181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1182 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1183 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" ); 1184 # endif 1185 1186 VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; 1187 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); 1188 1189 return committedMemoryInBytes; 1190 } 1191 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1192 1193 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1194 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1195 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1196 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1197 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1199 { 1200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1201 return static_cast<Result>( d.vkBindBufferMemory( 1202 
static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1203 } 1204 #else 1205 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1206 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( 1207 VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 1208 { 1209 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1210 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1211 VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" ); 1212 # endif 1213 1214 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1215 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1216 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); 1217 1218 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1219 } 1220 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1221 1222 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1223 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, 1225 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1226 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1227 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1228 { 1229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
1230 return static_cast<Result>( d.vkBindImageMemory( 1231 static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1232 } 1233 #else 1234 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1235 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( 1236 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 1237 { 1238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1239 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1240 VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" ); 1241 # endif 1242 1243 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1244 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1245 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); 1246 1247 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1248 } 1249 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1250 1251 template <typename Dispatch> getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1252 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1253 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1254 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1255 { 1256 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1257 d.vkGetBufferMemoryRequirements( 1258 
static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1259 } 1260 1261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1262 template <typename Dispatch> 1263 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,Dispatch const & d) const1264 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1265 { 1266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1267 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1268 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" ); 1269 # endif 1270 1271 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1272 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1273 1274 return memoryRequirements; 1275 } 1276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1277 1278 template <typename Dispatch> getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1279 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1280 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1282 { 1283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1284 d.vkGetImageMemoryRequirements( 1285 static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1286 } 1287 1288 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1289 template <typename Dispatch> 1290 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements 
getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1291 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1292 { 1293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1294 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1295 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" ); 1296 # endif 1297 1298 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1299 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1300 1301 return memoryRequirements; 1302 } 1303 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1304 1305 template <typename Dispatch> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,Dispatch const & d) const1306 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1307 uint32_t * pSparseMemoryRequirementCount, 1308 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, 1309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1310 { 1311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1312 d.vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), 1313 static_cast<VkImage>( image ), 1314 pSparseMemoryRequirementCount, 1315 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); 1316 } 1317 1318 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1319 template <typename SparseImageMemoryRequirementsAllocator, 1320 typename Dispatch, 1321 typename std::enable_if< 1322 std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, 1323 int>::type> 1324 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1325 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 1326 { 1327 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1328 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1329 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1330 # endif 1331 1332 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; 1333 uint32_t sparseMemoryRequirementCount; 1334 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1335 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1336 d.vkGetImageSparseMemoryRequirements( m_device, 1337 static_cast<VkImage>( image ), 1338 &sparseMemoryRequirementCount, 1339 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1340 1341 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1342 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1343 { 1344 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1345 } 1346 return sparseMemoryRequirements; 1347 } 1348 1349 template <typename SparseImageMemoryRequirementsAllocator, 1350 typename Dispatch, 1351 typename std::enable_if< 1352 std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, 1353 int>::type> 1354 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> 
getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,Dispatch const & d) const1355 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1356 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, 1357 Dispatch const & d ) const 1358 { 1359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1360 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1361 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1362 # endif 1363 1364 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( 1365 sparseImageMemoryRequirementsAllocator ); 1366 uint32_t sparseMemoryRequirementCount; 1367 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1368 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1369 d.vkGetImageSparseMemoryRequirements( m_device, 1370 static_cast<VkImage>( image ), 1371 &sparseMemoryRequirementCount, 1372 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1373 1374 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1375 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1376 { 1377 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1378 } 1379 return sparseMemoryRequirements; 1380 } 1381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1382 1383 template <typename Dispatch> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,uint32_t * 
pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,Dispatch const & d) const1384 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1385 VULKAN_HPP_NAMESPACE::ImageType type, 1386 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1387 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1388 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1389 uint32_t * pPropertyCount, 1390 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, 1391 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1392 { 1393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1394 d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 1395 static_cast<VkFormat>( format ), 1396 static_cast<VkImageType>( type ), 1397 static_cast<VkSampleCountFlagBits>( samples ), 1398 static_cast<VkImageUsageFlags>( usage ), 1399 static_cast<VkImageTiling>( tiling ), 1400 pPropertyCount, 1401 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); 1402 } 1403 1404 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1405 template < 1406 typename SparseImageFormatPropertiesAllocator, 1407 typename Dispatch, 1408 typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, 1409 int>::type> 1410 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,Dispatch const & d) const1411 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1412 VULKAN_HPP_NAMESPACE::ImageType type, 1413 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1414 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1415 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1416 Dispatch const & d ) const 1417 { 1418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1419 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1420 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1421 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1422 # endif 1423 1424 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; 1425 uint32_t propertyCount; 1426 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1427 static_cast<VkFormat>( format ), 1428 static_cast<VkImageType>( type ), 1429 static_cast<VkSampleCountFlagBits>( samples ), 1430 static_cast<VkImageUsageFlags>( usage ), 1431 static_cast<VkImageTiling>( tiling ), 1432 &propertyCount, 1433 nullptr ); 1434 properties.resize( propertyCount ); 1435 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1436 static_cast<VkFormat>( format ), 1437 static_cast<VkImageType>( type ), 1438 static_cast<VkSampleCountFlagBits>( samples ), 1439 static_cast<VkImageUsageFlags>( usage ), 1440 static_cast<VkImageTiling>( tiling ), 1441 &propertyCount, 1442 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1443 1444 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1445 if ( propertyCount < properties.size() ) 1446 { 1447 properties.resize( propertyCount ); 1448 } 1449 return properties; 1450 } 1451 1452 template < 1453 typename SparseImageFormatPropertiesAllocator, 1454 typename Dispatch, 1455 typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, 1456 int>::type> 1457 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> 
getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,Dispatch const & d) const1458 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1459 VULKAN_HPP_NAMESPACE::ImageType type, 1460 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1461 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1462 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1463 SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, 1464 Dispatch const & d ) const 1465 { 1466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1467 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1468 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1469 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1470 # endif 1471 1472 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); 1473 uint32_t propertyCount; 1474 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1475 static_cast<VkFormat>( format ), 1476 static_cast<VkImageType>( type ), 1477 static_cast<VkSampleCountFlagBits>( samples ), 1478 static_cast<VkImageUsageFlags>( usage ), 1479 static_cast<VkImageTiling>( tiling ), 1480 &propertyCount, 1481 nullptr ); 1482 properties.resize( propertyCount ); 1483 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1484 static_cast<VkFormat>( format ), 1485 static_cast<VkImageType>( type ), 1486 static_cast<VkSampleCountFlagBits>( samples ), 1487 static_cast<VkImageUsageFlags>( usage ), 1488 static_cast<VkImageTiling>( tiling ), 1489 &propertyCount, 1490 reinterpret_cast<VkSparseImageFormatProperties *>( 
properties.data() ) ); 1491 1492 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1493 if ( propertyCount < properties.size() ) 1494 { 1495 properties.resize( propertyCount ); 1496 } 1497 return properties; 1498 } 1499 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1500 1501 template <typename Dispatch> bindSparse(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1502 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, 1503 const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, 1504 VULKAN_HPP_NAMESPACE::Fence fence, 1505 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1506 { 1507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1508 return static_cast<Result>( d.vkQueueBindSparse( 1509 static_cast<VkQueue>( m_queue ), bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); 1510 } 1511 1512 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1513 template <typename Dispatch> bindSparse(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1514 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( 1515 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1516 { 1517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1518 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1519 VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" ); 1520 # endif 1521 1522 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1523 d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), 
static_cast<VkFence>( fence ) ) ); 1524 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); 1525 1526 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1527 } 1528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1529 1530 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const1531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, 1532 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1533 VULKAN_HPP_NAMESPACE::Fence * pFence, 1534 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1535 { 1536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1537 return static_cast<Result>( d.vkCreateFence( static_cast<VkDevice>( m_device ), 1538 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), 1539 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1540 reinterpret_cast<VkFence *>( pFence ) ) ); 1541 } 1542 1543 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1544 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1545 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( 1546 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1547 { 1548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1550 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1551 # endif 1552 1553 VULKAN_HPP_NAMESPACE::Fence fence; 1554 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1555 d.vkCreateFence( m_device, 1556 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1557 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1558 reinterpret_cast<VkFence *>( &fence ) ) ); 1559 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); 1560 1561 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 1562 } 1563 1564 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1565 template <typename Dispatch> createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( 1567 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1568 { 1569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1571 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1572 # endif 1573 1574 VULKAN_HPP_NAMESPACE::Fence fence; 1575 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1576 d.vkCreateFence( m_device, 1577 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1578 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1579 reinterpret_cast<VkFence *>( &fence ) ) ); 1580 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); 1581 1582 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1583 result, 
UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1584 } 1585 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1586 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1587 1588 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1589 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1590 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1591 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1592 { 1593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1594 d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1595 } 1596 1597 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1598 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1599 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1600 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1601 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1602 { 1603 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1604 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1605 VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1606 # endif 1607 1608 d.vkDestroyFence( m_device, 1609 static_cast<VkFence>( fence ), 1610 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1611 } 1612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1613 1614 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1615 VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1616 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1617 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1618 { 1619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1620 d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1621 } 1622 1623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1624 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1625 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1626 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1627 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1628 { 1629 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1631 VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1632 # endif 1633 1634 d.vkDestroyFence( m_device, 1635 static_cast<VkFence>( fence ), 1636 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1637 } 1638 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1639 1640 template <typename Dispatch> resetFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,Dispatch const & d) const1641 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, 1642 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1643 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1644 { 1645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1646 return static_cast<Result>( d.vkResetFences( static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); 1647 } 1648 1649 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1650 template <typename 
Dispatch> 1651 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,Dispatch const & d) const1652 Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const 1653 { 1654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1655 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1656 VULKAN_HPP_ASSERT( d.vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" ); 1657 # endif 1658 1659 VULKAN_HPP_NAMESPACE::Result result = 1660 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) ); 1661 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); 1662 1663 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1664 } 1665 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1666 1667 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1668 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1669 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1670 { 1671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1672 return static_cast<Result>( d.vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ) ) ); 1673 } 1674 #else 1675 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1677 { 1678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1680 VULKAN_HPP_ASSERT( d.vkGetFenceStatus && 
"Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" ); 1681 # endif 1682 1683 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1684 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1685 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1686 1687 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1688 } 1689 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1690 1691 template <typename Dispatch> waitForFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1692 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, 1693 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1694 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1695 uint64_t timeout, 1696 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1697 { 1698 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1699 return static_cast<Result>( d.vkWaitForFences( 1700 static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); 1701 } 1702 1703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1704 template <typename Dispatch> 1705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1706 Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, 1707 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1708 uint64_t timeout, 1709 Dispatch const & d ) const 1710 { 1711 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1712 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1713 VULKAN_HPP_ASSERT( 
d.vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" ); 1714 # endif 1715 1716 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1717 d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) ); 1718 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1719 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 1720 1721 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1722 } 1723 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1724 1725 template <typename Dispatch> createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,Dispatch const & d) const1726 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, 1727 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1728 VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, 1729 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1730 { 1731 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1732 return static_cast<Result>( d.vkCreateSemaphore( static_cast<VkDevice>( m_device ), 1733 reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), 1734 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1735 reinterpret_cast<VkSemaphore *>( pSemaphore ) ) ); 1736 } 1737 1738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1739 template <typename Dispatch> 1740 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1741 Device::createSemaphore( const 
VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1742 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1743 Dispatch const & d ) const 1744 { 1745 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1746 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1747 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1748 # endif 1749 1750 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1751 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1752 d.vkCreateSemaphore( m_device, 1753 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1754 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1755 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1756 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); 1757 1758 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); 1759 } 1760 1761 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1762 template <typename Dispatch> 1763 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1764 Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1765 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1766 Dispatch const & d ) const 1767 { 1768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1769 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1770 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1771 # endif 1772 1773 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1774 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1775 d.vkCreateSemaphore( m_device, 1776 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1777 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1778 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1779 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); 1780 1781 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1782 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1783 } 1784 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1785 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1786 1787 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1788 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1789 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1790 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1791 { 1792 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1793 d.vkDestroySemaphore( 1794 static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1795 } 1796 1797 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1798 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1799 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1800 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1801 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1802 { 1803 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 1804 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1805 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1806 # endif 1807 1808 d.vkDestroySemaphore( m_device, 1809 static_cast<VkSemaphore>( semaphore ), 1810 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1811 } 1812 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1813 1814 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1815 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1816 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1817 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1818 { 1819 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1820 d.vkDestroySemaphore( 1821 static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1822 } 1823 1824 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1825 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1826 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1827 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1828 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1829 { 1830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1832 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1833 # endif 1834 1835 d.vkDestroySemaphore( m_device, 1836 static_cast<VkSemaphore>( semaphore ), 1837 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1838 } 1839 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1840 1841 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Event * pEvent,Dispatch const & d) const1842 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, 1843 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1844 VULKAN_HPP_NAMESPACE::Event * pEvent, 1845 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1846 { 1847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1848 return static_cast<Result>( d.vkCreateEvent( static_cast<VkDevice>( m_device ), 1849 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), 1850 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1851 reinterpret_cast<VkEvent *>( pEvent ) ) ); 1852 } 1853 1854 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1855 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1856 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( 1857 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1858 { 1859 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1860 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1861 VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1862 # endif 1863 1864 VULKAN_HPP_NAMESPACE::Event event; 1865 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1866 d.vkCreateEvent( m_device, 1867 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 
1868 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1869 reinterpret_cast<VkEvent *>( &event ) ) ); 1870 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); 1871 1872 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); 1873 } 1874 1875 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1876 template <typename Dispatch> createEventUnique(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1877 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( 1878 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1879 { 1880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1882 VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1883 # endif 1884 1885 VULKAN_HPP_NAMESPACE::Event event; 1886 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1887 d.vkCreateEvent( m_device, 1888 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1889 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1890 reinterpret_cast<VkEvent *>( &event ) ) ); 1891 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); 1892 1893 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1894 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1895 } 1896 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1897 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1898 1899 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1900 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1901 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1902 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1903 { 1904 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1905 d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1906 } 1907 1908 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1909 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1910 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1911 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1912 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1913 { 1914 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1915 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1916 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1917 # endif 1918 1919 d.vkDestroyEvent( m_device, 1920 static_cast<VkEvent>( event ), 1921 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1922 } 1923 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1924 1925 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1926 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1927 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1929 { 1930 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1931 d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1932 } 1933 1934 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1935 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1936 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1937 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1938 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1939 { 1940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1942 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1943 # endif 1944 1945 d.vkDestroyEvent( m_device, 1946 static_cast<VkEvent>( event ), 1947 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1948 } 1949 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1950 1951 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1952 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1953 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1954 { 1955 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1956 return static_cast<Result>( d.vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); 1957 } 1958 #else 1959 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1960 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1961 { 1962 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1963 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1964 VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" ); 1965 # endif 1966 1967 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1968 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1969 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); 1970 1971 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1972 } 1973 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1974 1975 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1976 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1977 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1978 { 1979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1980 return static_cast<Result>( d.vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); 1981 } 1982 #else 1983 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1984 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, 1985 Dispatch const & d ) const 1986 { 1987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1988 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1989 VULKAN_HPP_ASSERT( d.vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" ); 1990 # endif 1991 1992 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1993 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::setEvent" ); 1994 1995 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1996 } 1997 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1998 1999 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 2000 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const2001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2002 { 2003 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2004 return static_cast<Result>( d.vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); 2005 } 2006 #else 2007 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const2008 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 2009 { 2010 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2011 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2012 VULKAN_HPP_ASSERT( d.vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" ); 2013 # endif 2014 2015 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 2016 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); 2017 2018 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 2019 } 2020 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2021 2022 template <typename Dispatch> createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,Dispatch const & d) const2023 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, 2024 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2025 VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, 2026 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2027 { 2028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2029 return static_cast<Result>( d.vkCreateQueryPool( static_cast<VkDevice>( m_device ), 2030 reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), 2031 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2032 reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); 2033 } 2034 2035 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2036 template <typename Dispatch> 2037 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2038 Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2039 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2040 Dispatch const & d ) const 2041 { 2042 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2043 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2044 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2045 # endif 2046 2047 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2048 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2049 d.vkCreateQueryPool( m_device, 2050 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2051 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2052 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2053 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); 2054 2055 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); 2056 } 
2057 2058 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2059 template <typename Dispatch> 2060 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2061 Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2062 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2063 Dispatch const & d ) const 2064 { 2065 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2066 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2067 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2068 # endif 2069 2070 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2071 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2072 d.vkCreateQueryPool( m_device, 2073 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2074 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2075 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2076 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); 2077 2078 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2079 result, UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2080 } 2081 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2082 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2083 2084 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2085 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool 
queryPool, 2086 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2088 { 2089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2090 d.vkDestroyQueryPool( 2091 static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2092 } 2093 2094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2095 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2096 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2097 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2098 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2099 { 2100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2101 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2102 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2103 # endif 2104 2105 d.vkDestroyQueryPool( m_device, 2106 static_cast<VkQueryPool>( queryPool ), 2107 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2108 } 2109 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2110 2111 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2112 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2113 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2115 { 2116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2117 d.vkDestroyQueryPool( 2118 static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( 
pAllocator ) ); 2119 } 2120 2121 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2122 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2123 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2124 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2125 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2126 { 2127 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2128 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2129 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2130 # endif 2131 2132 d.vkDestroyQueryPool( m_device, 2133 static_cast<VkQueryPool>( queryPool ), 2134 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2135 } 2136 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2137 2138 template <typename Dispatch> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2139 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2140 uint32_t firstQuery, 2141 uint32_t queryCount, 2142 size_t dataSize, 2143 void * pData, 2144 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2145 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2146 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2147 { 2148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2149 return static_cast<Result>( d.vkGetQueryPoolResults( static_cast<VkDevice>( m_device ), 2150 static_cast<VkQueryPool>( queryPool ), 2151 firstQuery, 2152 queryCount, 2153 dataSize, 2154 pData, 2155 static_cast<VkDeviceSize>( stride ), 2156 static_cast<VkQueryResultFlags>( 
flags ) ) ); 2157 } 2158 2159 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2160 template <typename DataType, 2161 typename DataTypeAllocator, 2162 typename Dispatch, 2163 typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type> 2164 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2165 Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2166 uint32_t firstQuery, 2167 uint32_t queryCount, 2168 size_t dataSize, 2169 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2170 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2171 Dispatch const & d ) const 2172 { 2173 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2174 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2175 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2176 # endif 2177 2178 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 2179 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 2180 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2181 static_cast<VkQueryPool>( queryPool ), 2182 firstQuery, 2183 queryCount, 2184 data.size() * sizeof( DataType ), 2185 reinterpret_cast<void *>( data.data() ), 2186 static_cast<VkDeviceSize>( stride ), 2187 static_cast<VkQueryResultFlags>( flags ) ) ); 2188 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 2189 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", 2190 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2191 2192 return ResultValue<std::vector<DataType, DataTypeAllocator>>( result, std::move( data ) ); 2193 } 2194 
2195 template <typename DataType, typename Dispatch> getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2196 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2197 uint32_t firstQuery, 2198 uint32_t queryCount, 2199 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2200 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2201 Dispatch const & d ) const 2202 { 2203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2205 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2206 # endif 2207 2208 DataType data; 2209 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2210 static_cast<VkQueryPool>( queryPool ), 2211 firstQuery, 2212 queryCount, 2213 sizeof( DataType ), 2214 reinterpret_cast<void *>( &data ), 2215 static_cast<VkDeviceSize>( stride ), 2216 static_cast<VkQueryResultFlags>( flags ) ) ); 2217 VULKAN_HPP_NAMESPACE::detail::resultCheck( 2218 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2219 2220 return ResultValue<DataType>( result, std::move( data ) ); 2221 } 2222 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2223 2224 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Buffer * pBuffer,Dispatch const & d) const2225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, 2226 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2227 
VULKAN_HPP_NAMESPACE::Buffer * pBuffer, 2228 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2229 { 2230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2231 return static_cast<Result>( d.vkCreateBuffer( static_cast<VkDevice>( m_device ), 2232 reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), 2233 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2234 reinterpret_cast<VkBuffer *>( pBuffer ) ) ); 2235 } 2236 2237 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2238 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2239 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( 2240 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2241 { 2242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2243 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2244 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2245 # endif 2246 2247 VULKAN_HPP_NAMESPACE::Buffer buffer; 2248 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2249 d.vkCreateBuffer( m_device, 2250 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2251 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2252 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2253 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); 2254 2255 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 2256 } 2257 2258 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2259 template <typename Dispatch> createBufferUnique(const 
VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2260 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( 2261 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2262 { 2263 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2264 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2265 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2266 # endif 2267 2268 VULKAN_HPP_NAMESPACE::Buffer buffer; 2269 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2270 d.vkCreateBuffer( m_device, 2271 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2272 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2273 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2274 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); 2275 2276 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2277 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2278 } 2279 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2280 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2281 2282 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2283 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2284 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2285 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2286 { 2287 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 2288 d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2289 } 2290 2291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2292 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2293 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2294 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2295 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2296 { 2297 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2298 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2299 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2300 # endif 2301 2302 d.vkDestroyBuffer( m_device, 2303 static_cast<VkBuffer>( buffer ), 2304 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2305 } 2306 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2307 2308 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2309 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2310 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2311 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2312 { 2313 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2314 d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2315 } 2316 2317 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2318 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2319 
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2320 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2321 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2322 { 2323 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2324 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2325 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2326 # endif 2327 2328 d.vkDestroyBuffer( m_device, 2329 static_cast<VkBuffer>( buffer ), 2330 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2331 } 2332 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2333 2334 template <typename Dispatch> createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferView * pView,Dispatch const & d) const2335 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, 2336 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2337 VULKAN_HPP_NAMESPACE::BufferView * pView, 2338 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2339 { 2340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2341 return static_cast<Result>( d.vkCreateBufferView( static_cast<VkDevice>( m_device ), 2342 reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), 2343 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2344 reinterpret_cast<VkBufferView *>( pView ) ) ); 2345 } 2346 2347 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2348 template <typename Dispatch> 2349 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) 
const2350 Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2351 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2352 Dispatch const & d ) const 2353 { 2354 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2355 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2356 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2357 # endif 2358 2359 VULKAN_HPP_NAMESPACE::BufferView view; 2360 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2361 d.vkCreateBufferView( m_device, 2362 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2363 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2364 reinterpret_cast<VkBufferView *>( &view ) ) ); 2365 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); 2366 2367 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); 2368 } 2369 2370 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2371 template <typename Dispatch> 2372 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2373 Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2374 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2375 Dispatch const & d ) const 2376 { 2377 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2378 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2379 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2380 # endif 2381 2382 
VULKAN_HPP_NAMESPACE::BufferView view; 2383 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2384 d.vkCreateBufferView( m_device, 2385 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2386 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2387 reinterpret_cast<VkBufferView *>( &view ) ) ); 2388 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); 2389 2390 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2391 result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2392 } 2393 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2394 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2395 2396 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2397 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2398 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2400 { 2401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2402 d.vkDestroyBufferView( 2403 static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2404 } 2405 2406 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2407 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2408 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2409 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2410 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2411 { 
2412 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2413 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2414 VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2415 # endif 2416 2417 d.vkDestroyBufferView( m_device, 2418 static_cast<VkBufferView>( bufferView ), 2419 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2420 } 2421 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2422 2423 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2424 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2425 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2426 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2427 { 2428 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2429 d.vkDestroyBufferView( 2430 static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2431 } 2432 2433 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2434 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2435 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2436 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2437 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2438 { 2439 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2440 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2441 VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2442 # endif 2443 2444 d.vkDestroyBufferView( m_device, 2445 static_cast<VkBufferView>( bufferView ), 2446 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2447 } 2448 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2449 2450 template <typename Dispatch> createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Image * pImage,Dispatch const & d) const2451 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, 2452 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2453 VULKAN_HPP_NAMESPACE::Image * pImage, 2454 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2455 { 2456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2457 return static_cast<Result>( d.vkCreateImage( static_cast<VkDevice>( m_device ), 2458 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), 2459 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2460 reinterpret_cast<VkImage *>( pImage ) ) ); 2461 } 2462 2463 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2464 template <typename Dispatch> createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2465 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( 2466 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2467 { 2468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2469 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2470 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2471 # endif 2472 2473 VULKAN_HPP_NAMESPACE::Image image; 2474 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2475 d.vkCreateImage( m_device, 2476 
reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2477 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2478 reinterpret_cast<VkImage *>( &image ) ) ); 2479 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); 2480 2481 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); 2482 } 2483 2484 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2485 template <typename Dispatch> createImageUnique(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2486 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( 2487 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2488 { 2489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2490 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2491 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2492 # endif 2493 2494 VULKAN_HPP_NAMESPACE::Image image; 2495 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2496 d.vkCreateImage( m_device, 2497 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2498 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2499 reinterpret_cast<VkImage *>( &image ) ) ); 2500 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); 2501 2502 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2503 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) 
); 2504 } 2505 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2506 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2507 2508 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2509 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2510 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2511 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2512 { 2513 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2514 d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2515 } 2516 2517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2518 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2519 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2520 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2521 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2522 { 2523 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2524 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2525 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2526 # endif 2527 2528 d.vkDestroyImage( m_device, 2529 static_cast<VkImage>( image ), 2530 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2531 } 2532 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2533 2534 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2535 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2536 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2537 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 2538 { 2539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2540 d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2541 } 2542 2543 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2544 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2545 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2546 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2547 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2548 { 2549 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2550 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2551 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2552 # endif 2553 2554 d.vkDestroyImage( m_device, 2555 static_cast<VkImage>( image ), 2556 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2557 } 2558 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2559 2560 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,Dispatch const & d) const2561 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2562 const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, 2563 VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, 2564 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2565 { 2566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2567 d.vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ), 2568 static_cast<VkImage>( image ), 2569 reinterpret_cast<const VkImageSubresource *>( pSubresource ), 2570 
reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); 2571 } 2572 2573 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2574 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,Dispatch const & d) const2575 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( 2576 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2577 { 2578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2579 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2580 VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" ); 2581 # endif 2582 2583 VULKAN_HPP_NAMESPACE::SubresourceLayout layout; 2584 d.vkGetImageSubresourceLayout( m_device, 2585 static_cast<VkImage>( image ), 2586 reinterpret_cast<const VkImageSubresource *>( &subresource ), 2587 reinterpret_cast<VkSubresourceLayout *>( &layout ) ); 2588 2589 return layout; 2590 } 2591 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2592 2593 template <typename Dispatch> createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ImageView * pView,Dispatch const & d) const2594 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, 2595 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2596 VULKAN_HPP_NAMESPACE::ImageView * pView, 2597 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2598 { 2599 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2600 return static_cast<Result>( d.vkCreateImageView( static_cast<VkDevice>( m_device ), 2601 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), 2602 reinterpret_cast<const 
VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkImageView *>( pView ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes references instead of pointers, checks the result
  // (resultCheck throws on failure when exceptions are enabled) and returns the created handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
    Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::ImageView view;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Same as createImageView, but wraps the handle in a UniqueHandle that destroys it
  // (via ObjectDestroy, using this device and the given allocator) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
    Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::ImageView view;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw C-API pass-through: no result checking, noexcept.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView(
      static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Overload of the generic destroy() entry point for ImageView; forwards to vkDestroyImageView.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView(
      static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                            VULKAN_HPP_NAMESPACE::ShaderModule *                 pShaderModule,
                                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateShaderModule( static_cast<VkDevice>( m_device ),
                                                        reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
    Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
    Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule(
      static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule(
      static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::PipelineCache *                 pPipelineCache,
                                                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePipelineCache( static_cast<VkDevice>( m_device ),
                                                         reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
    Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VULKAN_HPP_NAMESPACE::Result        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
    Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VULKAN_HPP_NAMESPACE::Result        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineCache(
      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                       Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineCache(
      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                                                              size_t *                            pDataSize,
                                                                              void *                              pData,
                                                                              Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced mode: implements the Vulkan two-call idiom — query the size with a null
  // data pointer, resize, fetch, and retry while the driver reports eIncomplete
  // (the cache can grow between the size query and the fetch).
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
# endif

    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // The driver may have written less than was reserved; shrink to the actual size.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Same as above, but the result vector is constructed with a caller-supplied allocator.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
# endif

    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache         dstCache,
                                                                             uint32_t                                    srcCacheCount,
                                                                             const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
                                                                             Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkMergePipelineCaches(
      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced mode: source caches are passed as an ArrayProxy, which supplies both
  // the count and the contiguous data pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache                                        dstCache,
                                 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
                                 Dispatch const &                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergePipelineCaches(
      m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                                                                 uint32_t                                                 createInfoCount,
                                                                                 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::Pipeline *                         pPipelines,
                                                                                 Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateGraphicsPipelines( static_cast<VkDevice>( m_device ),
                                                             static_cast<VkPipelineCache>( pipelineCache ),
                                                             createInfoCount,
                                                             reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced mode: returns a ResultValue (not ResultValueType) because
  // ePipelineCompileRequiredEXT is accepted as a non-error result alongside eSuccess,
  // so the caller still receives both the result code and the pipelines.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                                       pipelineCache,
                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                 allocator,
                                     Dispatch const &                                                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Same as above, with a caller-supplied allocator for the result vector.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                                       pipelineCache,
                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                 allocator,
                                     PipelineAllocator &                                                                       pipelineAllocator,
                                     Dispatch const &                                                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Single-pipeline convenience: calls vkCreateGraphicsPipelines with createInfoCount == 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                    const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &  createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) );
  }

# ifndef
VULKAN_HPP_NO_SMART_HANDLE 3158 template < 3159 typename Dispatch, 3160 typename PipelineAllocator, 3161 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3163 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3164 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3165 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3166 Dispatch const & d ) const 3167 { 3168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3169 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3170 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3171 # endif 3172 3173 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3174 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3175 m_device, 3176 static_cast<VkPipelineCache>( pipelineCache ), 3177 createInfos.size(), 3178 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3179 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3180 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3181 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3182 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 3183 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3184 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 3185 uniquePipelines.reserve( createInfos.size() ); 3186 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3187 for ( auto const & pipeline : pipelines ) 3188 { 3189 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3190 } 3191 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3192 } 3193 3194 template < 3195 typename Dispatch, 3196 typename PipelineAllocator, 3197 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3199 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3200 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3201 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3202 PipelineAllocator & pipelineAllocator, 3203 Dispatch const & d ) const 3204 { 3205 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3206 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3207 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3208 # endif 3209 3210 
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3211 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3212 m_device, 3213 static_cast<VkPipelineCache>( pipelineCache ), 3214 createInfos.size(), 3215 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3216 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3217 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3218 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3219 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 3220 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3221 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 3222 uniquePipelines.reserve( createInfos.size() ); 3223 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3224 for ( auto const & pipeline : pipelines ) 3225 { 3226 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3227 } 3228 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3229 } 3230 3231 template <typename Dispatch> 3232 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3233 Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3234 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 3235 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator, 3236 Dispatch const & d ) const 3237 { 3238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3239 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3240 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3241 # endif 3242 3243 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3244 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3245 m_device, 3246 static_cast<VkPipelineCache>( pipelineCache ), 3247 1, 3248 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 3249 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3250 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3251 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3252 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", 3253 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3254 3255 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3256 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3257 } 3258 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3259 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3260 3261 template <typename Dispatch> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const3262 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3263 uint32_t createInfoCount, 3264 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, 3265 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3266 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 3267 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3268 { 3269 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3270 return static_cast<Result>( d.vkCreateComputePipelines( static_cast<VkDevice>( m_device ), 3271 static_cast<VkPipelineCache>( pipelineCache ), 3272 createInfoCount, 3273 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), 3274 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3275 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 3276 } 3277 3278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3279 template <typename PipelineAllocator, 3280 typename Dispatch, 3281 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 3282 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3283 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3284 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3285 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3286 Dispatch const & d ) const 3287 { 3288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3289 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3290 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3291 # endif 3292 3293 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 3294 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3295 m_device, 3296 static_cast<VkPipelineCache>( pipelineCache ), 3297 createInfos.size(), 3298 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3299 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3300 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3301 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3302 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 3303 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3304 3305 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3306 } 3307 3308 template <typename PipelineAllocator, 3309 typename Dispatch, 3310 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 3311 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3312 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3313 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3314 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3315 PipelineAllocator & pipelineAllocator, 3316 Dispatch const & d ) const 3317 { 3318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3320 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function 
<vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3321 # endif 3322 3323 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 3324 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3325 m_device, 3326 static_cast<VkPipelineCache>( pipelineCache ), 3327 createInfos.size(), 3328 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3329 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3330 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3331 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3332 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 3333 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3334 3335 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3336 } 3337 3338 template <typename Dispatch> 3339 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3340 Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3341 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 3342 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3343 Dispatch const & d ) const 3344 { 3345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3346 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3347 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3348 # endif 3349 3350 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3351 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3352 m_device, 3353 static_cast<VkPipelineCache>( pipelineCache ), 3354 1, 3355 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3356 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3357 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3358 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3359 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", 3360 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3361 3362 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 3363 } 3364 3365 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3366 template < 3367 typename Dispatch, 3368 typename PipelineAllocator, 3369 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3370 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3371 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3372 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3373 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3374 Dispatch const & d ) const 3375 { 3376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3378 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function 
<vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3379 # endif 3380 3381 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3382 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3383 m_device, 3384 static_cast<VkPipelineCache>( pipelineCache ), 3385 createInfos.size(), 3386 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3387 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3388 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3389 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3390 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 3391 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3392 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 3393 uniquePipelines.reserve( createInfos.size() ); 3394 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3395 for ( auto const & pipeline : pipelines ) 3396 { 3397 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3398 } 3399 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3400 } 3401 3402 template < 3403 typename Dispatch, 3404 typename PipelineAllocator, 3405 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3406 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & 
createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3407 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3408 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3409 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3410 PipelineAllocator & pipelineAllocator, 3411 Dispatch const & d ) const 3412 { 3413 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3414 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3415 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3416 # endif 3417 3418 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3419 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3420 m_device, 3421 static_cast<VkPipelineCache>( pipelineCache ), 3422 createInfos.size(), 3423 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3424 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3425 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3426 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3427 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 3428 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3429 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 3430 uniquePipelines.reserve( createInfos.size() ); 3431 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3432 for ( auto const & pipeline : pipelines ) 3433 { 3434 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3435 } 3436 return 
ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3437 } 3438 3439 template <typename Dispatch> 3440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3441 Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3442 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 3443 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3444 Dispatch const & d ) const 3445 { 3446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3447 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3448 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3449 # endif 3450 3451 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3452 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3453 m_device, 3454 static_cast<VkPipelineCache>( pipelineCache ), 3455 1, 3456 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3457 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3458 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3459 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3460 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", 3461 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3462 3463 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3464 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, 
detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3465 } 3466 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3468 3469 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3470 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3471 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3472 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3473 { 3474 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3475 d.vkDestroyPipeline( 3476 static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3477 } 3478 3479 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3480 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3481 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3482 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3483 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3484 { 3485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3486 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3487 VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" ); 3488 # endif 3489 3490 d.vkDestroyPipeline( m_device, 3491 static_cast<VkPipeline>( pipeline ), 3492 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3493 } 3494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3495 3496 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3497 VULKAN_HPP_INLINE 
void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3498 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3500 { 3501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3502 d.vkDestroyPipeline( 3503 static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3504 } 3505 3506 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3507 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3508 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3509 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3510 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3511 { 3512 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3514 VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" ); 3515 # endif 3516 3517 d.vkDestroyPipeline( m_device, 3518 static_cast<VkPipeline>( pipeline ), 3519 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3520 } 3521 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3522 3523 template <typename Dispatch> createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,Dispatch const & d) const3524 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, 3525 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3526 VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, 3527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
3528 { 3529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3530 return static_cast<Result>( d.vkCreatePipelineLayout( static_cast<VkDevice>( m_device ), 3531 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), 3532 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3533 reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); 3534 } 3535 3536 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3537 template <typename Dispatch> 3538 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3539 Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3540 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3541 Dispatch const & d ) const 3542 { 3543 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3544 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3545 VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); 3546 # endif 3547 3548 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3549 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3550 d.vkCreatePipelineLayout( m_device, 3551 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3552 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3553 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3554 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); 3555 3556 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); 3557 } 3558 3559 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3560 template 
<typename Dispatch> 3561 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3562 Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3563 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3564 Dispatch const & d ) const 3565 { 3566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3567 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3568 VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); 3569 # endif 3570 3571 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3572 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3573 d.vkCreatePipelineLayout( m_device, 3574 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3575 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3576 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3577 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); 3578 3579 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3580 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3581 } 3582 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3583 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3584 3585 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3586 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( 
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3587 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3588 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3589 { 3590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3591 d.vkDestroyPipelineLayout( 3592 static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3593 } 3594 3595 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3596 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3597 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3598 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3599 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3600 { 3601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3602 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3603 VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); 3604 # endif 3605 3606 d.vkDestroyPipelineLayout( m_device, 3607 static_cast<VkPipelineLayout>( pipelineLayout ), 3608 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3609 } 3610 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3611 3612 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3613 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3614 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3615 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3616 { 3617 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3618 
d.vkDestroyPipelineLayout( 3619 static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3620 } 3621 3622 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3623 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3624 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3625 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3627 { 3628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3629 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3630 VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); 3631 # endif 3632 3633 d.vkDestroyPipelineLayout( m_device, 3634 static_cast<VkPipelineLayout>( pipelineLayout ), 3635 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3636 } 3637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3638 3639 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Sampler * pSampler,Dispatch const & d) const3640 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, 3641 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3642 VULKAN_HPP_NAMESPACE::Sampler * pSampler, 3643 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3644 { 3645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3646 return static_cast<Result>( d.vkCreateSampler( static_cast<VkDevice>( m_device ), 3647 reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), 3648 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3649 reinterpret_cast<VkSampler *>( pSampler ) ) ); 3650 } 3651 3652 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3653 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3654 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( 3655 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3656 { 3657 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3659 VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); 3660 # endif 3661 3662 VULKAN_HPP_NAMESPACE::Sampler sampler; 3663 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3664 d.vkCreateSampler( m_device, 3665 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3666 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3667 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3668 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); 3669 3670 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); 3671 } 3672 3673 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3674 template <typename Dispatch> createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( 3676 const 
VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3677 { 3678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3680 VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); 3681 # endif 3682 3683 VULKAN_HPP_NAMESPACE::Sampler sampler; 3684 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3685 d.vkCreateSampler( m_device, 3686 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3687 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3688 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3689 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); 3690 3691 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3692 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3693 } 3694 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3695 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3696 3697 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3698 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3699 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3700 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3701 { 3702 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3703 d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3704 } 3705 3706 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3707 template <typename Dispatch> 
destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3708 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3709 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3711 { 3712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3713 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3714 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3715 # endif 3716 3717 d.vkDestroySampler( m_device, 3718 static_cast<VkSampler>( sampler ), 3719 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3720 } 3721 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3722 3723 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3724 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3725 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3726 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3727 { 3728 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3729 d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3730 } 3731 3732 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3733 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3734 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3735 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3736 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3737 { 3738 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 3739 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3740 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3741 # endif 3742 3743 d.vkDestroySampler( m_device, 3744 static_cast<VkSampler>( sampler ), 3745 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3746 } 3747 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3748 3749 template <typename Dispatch> createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,Dispatch const & d) const3750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 3751 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3752 VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, 3753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3754 { 3755 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3756 return static_cast<Result>( d.vkCreateDescriptorSetLayout( static_cast<VkDevice>( m_device ), 3757 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 3758 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3759 reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); 3760 } 3761 3762 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3763 template <typename Dispatch> 3764 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3765 Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3766 
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3767 Dispatch const & d ) const 3768 { 3769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3770 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3771 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3772 # endif 3773 3774 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3775 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3776 m_device, 3777 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3778 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3779 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3780 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); 3781 3782 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); 3783 } 3784 3785 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3786 template <typename Dispatch> 3787 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3788 Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3789 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3790 Dispatch const & d ) const 3791 { 3792 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3793 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3794 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3795 # endif 
3796 3797 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3798 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3799 m_device, 3800 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3801 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3802 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3803 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); 3804 3805 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3806 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3807 } 3808 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3809 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3810 3811 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3812 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3813 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3815 { 3816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3817 d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ), 3818 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3819 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3820 } 3821 3822 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3823 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3824 VULKAN_HPP_INLINE void 
Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3825 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3827 { 3828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3829 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3830 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3831 # endif 3832 3833 d.vkDestroyDescriptorSetLayout( 3834 m_device, 3835 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3836 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3837 } 3838 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3839 3840 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3841 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3842 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3843 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3844 { 3845 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3846 d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ), 3847 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3848 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3849 } 3850 3851 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3852 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3853 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3854 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3855 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 3856 { 3857 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3858 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3859 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3860 # endif 3861 3862 d.vkDestroyDescriptorSetLayout( 3863 m_device, 3864 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3865 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3866 } 3867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3868 3869 template <typename Dispatch> createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,Dispatch const & d) const3870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, 3871 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3872 VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, 3873 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3874 { 3875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3876 return static_cast<Result>( d.vkCreateDescriptorPool( static_cast<VkDevice>( m_device ), 3877 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), 3878 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3879 reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); 3880 } 3881 3882 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3883 template <typename Dispatch> 3884 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3885 
Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3886 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3887 Dispatch const & d ) const 3888 { 3889 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3890 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3891 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); 3892 # endif 3893 3894 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3895 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3896 d.vkCreateDescriptorPool( m_device, 3897 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3898 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3899 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3900 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); 3901 3902 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); 3903 } 3904 3905 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3906 template <typename Dispatch> 3907 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3908 Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3909 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3910 Dispatch const & d ) const 3911 { 3912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3913 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3914 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> 
requires <VK_VERSION_1_0>" ); 3915 # endif 3916 3917 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3918 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3919 d.vkCreateDescriptorPool( m_device, 3920 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3921 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3922 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3923 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); 3924 3925 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3926 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3927 } 3928 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3929 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3930 3931 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3932 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3933 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3934 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3935 { 3936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3937 d.vkDestroyDescriptorPool( 3938 static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3939 } 3940 3941 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3942 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3943 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3944 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3945 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3946 { 3947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3948 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3949 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3950 # endif 3951 3952 d.vkDestroyDescriptorPool( m_device, 3953 static_cast<VkDescriptorPool>( descriptorPool ), 3954 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3955 } 3956 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3957 3958 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3959 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3960 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3961 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3962 { 3963 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3964 d.vkDestroyDescriptorPool( 3965 static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3966 } 3967 3968 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3969 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3970 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3971 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3972 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3973 { 3974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3975 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) 3976 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3977 # endif 3978 3979 d.vkDestroyDescriptorPool( m_device, 3980 static_cast<VkDescriptorPool>( descriptorPool ), 3981 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3982 } 3983 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3984 3985 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3986 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3987 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3988 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3989 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3990 { 3991 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3992 return static_cast<Result>( d.vkResetDescriptorPool( 3993 static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3994 } 3995 #else 3996 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3997 VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3998 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3999 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4000 { 4001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4003 VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" ); 4004 # endif 4005 4006 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ); 4007 
} 4008 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4009 4010 template <typename Dispatch> allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const4011 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, 4012 VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4013 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4014 { 4015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4016 return static_cast<Result>( d.vkAllocateDescriptorSets( static_cast<VkDevice>( m_device ), 4017 reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), 4018 reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); 4019 } 4020 4021 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4022 template <typename DescriptorSetAllocator, 4023 typename Dispatch, 4024 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type> 4025 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const4026 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 4027 { 4028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4029 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4030 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4031 # endif 4032 4033 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); 4034 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4035 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4036 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 4037 4038 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); 4039 } 4040 4041 template <typename DescriptorSetAllocator, 4042 typename Dispatch, 4043 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type> 4044 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const4045 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 4046 DescriptorSetAllocator & descriptorSetAllocator, 4047 Dispatch const & d ) const 4048 { 4049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4050 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4051 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4052 # endif 4053 4054 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); 4055 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4056 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4057 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" 
); 4058 4059 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); 4060 } 4061 4062 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4063 template < 4064 typename Dispatch, 4065 typename DescriptorSetAllocator, 4066 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, 4067 int>::type> 4068 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4069 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const4070 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 4071 { 4072 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4073 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4074 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4075 # endif 4076 4077 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4078 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4079 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4080 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4081 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; 4082 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 4083 detail::PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4084 for ( auto const & descriptorSet : descriptorSets ) 4085 { 4086 
uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 4087 } 4088 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4089 } 4090 4091 template < 4092 typename Dispatch, 4093 typename DescriptorSetAllocator, 4094 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, 4095 int>::type> 4096 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4097 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const4098 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 4099 DescriptorSetAllocator & descriptorSetAllocator, 4100 Dispatch const & d ) const 4101 { 4102 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4103 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4104 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4105 # endif 4106 4107 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4108 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4109 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4110 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4111 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); 4112 uniqueDescriptorSets.reserve( 
allocateInfo.descriptorSetCount ); 4113 detail::PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4114 for ( auto const & descriptorSet : descriptorSets ) 4115 { 4116 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 4117 } 4118 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4119 } 4120 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4121 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4122 4123 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const4124 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4125 uint32_t descriptorSetCount, 4126 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4127 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4128 { 4129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4130 return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ), 4131 static_cast<VkDescriptorPool>( descriptorPool ), 4132 descriptorSetCount, 4133 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4134 } 4135 4136 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4137 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,Dispatch const & d) const4138 VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4139 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4141 { 4142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4143 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4144 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4145 # endif 4146 4147 d.vkFreeDescriptorSets( 4148 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 4149 } 4150 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4151 4152 template <typename Dispatch> Result(Device::free)4153 VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4154 uint32_t descriptorSetCount, 4155 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4156 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4157 { 4158 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4159 return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ), 4160 static_cast<VkDescriptorPool>( descriptorPool ), 4161 descriptorSetCount, 4162 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4163 } 4164 4165 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4166 template <typename Dispatch> 4167 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4168 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4170 { 4171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4172 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4173 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4174 # endif 4175 4176 d.vkFreeDescriptorSets( 4177 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 4178 } 4179 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4180 4181 template <typename Dispatch> updateDescriptorSets(uint32_t descriptorWriteCount,const 
VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,Dispatch const & d) const4182 VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, 4183 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 4184 uint32_t descriptorCopyCount, 4185 const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, 4186 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4187 { 4188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4189 d.vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ), 4190 descriptorWriteCount, 4191 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), 4192 descriptorCopyCount, 4193 reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) ); 4194 } 4195 4196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4197 template <typename Dispatch> 4198 VULKAN_HPP_INLINE void updateDescriptorSets(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,Dispatch const & d) const4199 Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 4200 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, 4201 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4202 { 4203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4205 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" ); 4206 # endif 4207 4208 d.vkUpdateDescriptorSets( m_device, 4209 descriptorWrites.size(), 4210 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), 4211 descriptorCopies.size(), 4212 
reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) ); 4213 } 4214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4215 4216 template <typename Dispatch> createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,Dispatch const & d) const4217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, 4218 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4219 VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, 4220 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4221 { 4222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4223 return static_cast<Result>( d.vkCreateFramebuffer( static_cast<VkDevice>( m_device ), 4224 reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), 4225 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4226 reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); 4227 } 4228 4229 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4230 template <typename Dispatch> 4231 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4232 Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4233 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4234 Dispatch const & d ) const 4235 { 4236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4237 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4238 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4239 # endif 4240 4241 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4242 VULKAN_HPP_NAMESPACE::Result 
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4243 d.vkCreateFramebuffer( m_device, 4244 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4245 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4246 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4247 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); 4248 4249 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); 4250 } 4251 4252 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4253 template <typename Dispatch> 4254 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4255 Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4256 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4257 Dispatch const & d ) const 4258 { 4259 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4260 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4261 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4262 # endif 4263 4264 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4265 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4266 d.vkCreateFramebuffer( m_device, 4267 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4269 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4270 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createFramebufferUnique" ); 4271 4272 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4273 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4274 } 4275 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4277 4278 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4279 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4280 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4282 { 4283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4284 d.vkDestroyFramebuffer( 4285 static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4286 } 4287 4288 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4289 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4290 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4291 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4292 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4293 { 4294 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4295 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4296 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4297 # endif 4298 4299 d.vkDestroyFramebuffer( m_device, 4300 static_cast<VkFramebuffer>( framebuffer ), 4301 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 
4302 } 4303 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4304 4305 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4306 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4307 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4308 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4309 { 4310 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4311 d.vkDestroyFramebuffer( 4312 static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4313 } 4314 4315 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4316 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4317 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4318 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4320 { 4321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4322 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4323 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4324 # endif 4325 4326 d.vkDestroyFramebuffer( m_device, 4327 static_cast<VkFramebuffer>( framebuffer ), 4328 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4329 } 4330 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4331 4332 template <typename Dispatch> createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const4333 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, 4334 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4335 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 4336 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4337 { 4338 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4339 return static_cast<Result>( d.vkCreateRenderPass( static_cast<VkDevice>( m_device ), 4340 reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), 4341 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4342 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 4343 } 4344 4345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4346 template <typename Dispatch> 4347 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4348 Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4349 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4350 Dispatch const & d ) const 4351 { 4352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4353 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4354 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4355 # endif 4356 4357 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4358 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4359 d.vkCreateRenderPass( m_device, 4360 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4361 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4362 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4363 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRenderPass" ); 4364 4365 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 4366 } 4367 4368 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4369 template <typename Dispatch> 4370 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4371 Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4372 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4373 Dispatch const & d ) const 4374 { 4375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4376 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4377 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4378 # endif 4379 4380 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4381 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4382 d.vkCreateRenderPass( m_device, 4383 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4384 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4385 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4386 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); 4387 4388 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4389 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4390 } 4391 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4392 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4393 4394 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4395 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4396 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4397 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4398 { 4399 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4400 d.vkDestroyRenderPass( 4401 static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4402 } 4403 4404 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4405 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4406 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4407 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4409 { 4410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4411 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4412 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4413 # endif 4414 4415 d.vkDestroyRenderPass( m_device, 4416 static_cast<VkRenderPass>( renderPass ), 4417 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4418 } 4419 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4420 4421 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4422 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4423 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4424 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4425 { 4426 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4427 d.vkDestroyRenderPass( 4428 static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4429 } 4430 4431 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4432 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4433 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4434 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4435 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4436 { 4437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4438 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4439 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4440 # endif 4441 4442 d.vkDestroyRenderPass( m_device, 4443 static_cast<VkRenderPass>( renderPass ), 4444 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4445 } 4446 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4447 4448 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const4449 VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4450 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 4451 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4452 { 4453 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4454 d.vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 4455 } 4456 4457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4458 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass 
  // NOTE(review): this region of the capture was extraction-garbled (collapsed prototype
  // fragments and original line numbers fused into the text). The code below is the same
  // token stream re-formatted, with comments added. The `template <typename Dispatch>`
  // header of getRenderAreaGranularity immediately precedes this span.

  // Enhanced mode: returns the render-area granularity for a render pass by value.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Every wrapper first checks that the dispatcher was built against the same Vulkan
    // header version as this file.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic loader, the function pointer may be null if the feature that
    // provides it was never loaded.
    VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Extent2D granularity;
    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );

    return granularity;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer variant: thin, noexcept pass-through to vkCreateCommandPool; the caller
  // owns error handling via the returned Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::CommandPool *               pCommandPool,
                                                                           Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCommandPool( static_cast<VkDevice>( m_device ),
                                                       reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: takes references / an Optional allocator, checks the result
  // (throws or returns a ResultValue depending on the exception configuration) and
  // returns the created CommandPool.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
    Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo &       createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VULKAN_HPP_NAMESPACE::Result      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Same as createCommandPool, but wraps the handle in a UniqueHandle that destroys it
  // (with the same allocator and dispatcher) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
    Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo &       createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VULKAN_HPP_NAMESPACE::Result      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer variant of vkDestroyCommandPool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCommandPool(
      static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant of vkDestroyCommandPool with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool                         commandPool,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                     Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Overload of the generic destroy(): identical to destroyCommandPool (raw form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCommandPool(
      static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Overload of the generic destroy(): identical to destroyCommandPool (enhanced form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool                         commandPool,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" );
# endif

    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant: returns the Result of vkResetCommandPool unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                                                                          VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
                                                                          Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkResetCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  }
#else
  // Enhanced variant: checks the Result; void-typed ResultValueType on success.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Raw-pointer variant: caller supplies the output array for the allocated command buffers.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
                                                                                VULKAN_HPP_NAMESPACE::CommandBuffer *                   pCommandBuffers,
                                                                                Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAllocateCommandBuffers( static_cast<VkDevice>( m_device ),
                                                            reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
                                                            reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: sizes a vector from allocateInfo.commandBufferCount and fills it.
  // The enable_if restricts CommandBufferAllocator to allocators of CommandBuffer.
  template <typename CommandBufferAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) );
  }

  // Enhanced variant with a caller-supplied vector allocator instance.
  template <typename CommandBufferAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                    CommandBufferAllocator &                                commandBufferAllocator,
                                    Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: allocates into a plain vector, then wraps each handle in a
  // UniqueHandle sharing one PoolFree deleter (handles are freed back to the pool).
  template <
    typename Dispatch,
    typename CommandBufferAllocator,
    typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    detail::PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) );
  }

  // Unique-handle variant with a caller-supplied vector allocator instance.
  template <
    typename Dispatch,
    typename CommandBufferAllocator,
    typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                          CommandBufferAllocator &                                commandBufferAllocator,
                                          Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    detail::PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer variant of vkFreeCommandBuffers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                                     uint32_t                                    commandBufferCount,
                                                     const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                     Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
                            static_cast<VkCommandPool>( commandPool ),
                            commandBufferCount,
                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: count and pointer come from the ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool                                               commandPool,
                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic free() overload, same as freeCommandBuffers. The name is parenthesized,
  // presumably to keep a function-like `free` macro from expanding here — note the
  // `void( Device::free )` spelling.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                          uint32_t                                    commandBufferCount,
                                          const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                          Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
                            static_cast<VkCommandPool>( commandPool ),
                            commandBufferCount,
                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced free() overload, same as freeCommandBuffers (ArrayProxy form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool                                                   commandPool,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer variant of vkBeginCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
                                                                      Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: checks the Result of vkBeginCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant of vkEndCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) );
  }
#else
  // Enhanced variant: checks the Result of vkEndCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant of vkResetCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
                                                                      Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  // Enhanced variant: checks the Result of vkResetCommandBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // The remaining wrappers are noexcept pass-throughs to the corresponding vkCmd* entry
  // points, casting C++ handle/enum types to their Vk equivalents.

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                      VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                      Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindPipeline(
      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
                                                     uint32_t                               viewportCount,
                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                     Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                                                                 firstViewport,
                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
                                                    uint32_t                             scissorCount,
                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                    Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                                                               firstScissor,
                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }

  // blendConstants is a fixed-size array of four floats (RGBA).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }

  // Raw-pointer variant: descriptor sets and dynamic offsets passed as count+pointer pairs.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint     pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout        layout,
                                                            uint32_t                                    firstSet,
                                                            uint32_t                                    descriptorSetCount,
                                                            const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                            uint32_t                                    dynamicOffsetCount,
                                                            const uint32_t *                            pDynamicOffsets,
                                                            Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSetCount,
                               reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
                               dynamicOffsetCount,
                               pDynamicOffsets );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: counts and pointers come from the ArrayProxies.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                                            uint32_t                                firstSet,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" );
# endif

    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSets.size(),
                               reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
                               dynamicOffsets.size(),
                               dynamicOffsets.data() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                         VULKAN_HPP_NAMESPACE::IndexType  indexType,
                                                         Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
                            static_cast<VkBuffer>( buffer ),
                            static_cast<VkDeviceSize>( offset ),
                            static_cast<VkIndexType>( indexType ) );
  }

  // Raw-pointer variant: pBuffers and pOffsets must both hold bindingCount entries.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                 firstBinding,
                                                           uint32_t                                 bindingCount,
                                                           const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                           const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                           Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ),
                              firstBinding,
                              bindingCount,
                              reinterpret_cast<const VkBuffer *>( pBuffers ),
                              reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: validates that buffers and offsets have matching sizes —
  // asserts under VULKAN_HPP_NO_EXCEPTIONS, throws LogicError otherwise (hence
  // NOEXCEPT_WHEN_NO_EXCEPTIONS rather than unconditional noexcept).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                                                   firstBinding,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw(
    uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t         indexCount,
                                                     uint32_t         instanceCount,
                                                     uint32_t         firstIndex,
                                                     int32_t          vertexOffset,
                                                     uint32_t         firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                      uint32_t                         drawCount,
                                                      uint32_t                         stride,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirect(
      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  // NOTE(review): drawIndexedIndirect is cut off at the end of this capture; only its
  // signature fragment is visible here — the body continues in the next chunk.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             uint32_t                         drawCount,
                                                             uint32_t                         stride,
                                                             Dispatch const &                 d ) const
VULKAN_HPP_NOEXCEPT 5117 { 5118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5119 d.vkCmdDrawIndexedIndirect( 5120 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 5121 } 5122 5123 template <typename Dispatch> 5124 VULKAN_HPP_INLINE void dispatch(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const5125 CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5126 { 5127 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5128 d.vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); 5129 } 5130 5131 template <typename Dispatch> dispatchIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const5132 VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 5133 VULKAN_HPP_NAMESPACE::DeviceSize offset, 5134 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5135 { 5136 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5137 d.vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 5138 } 5139 5140 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,Dispatch const & d) const5141 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5142 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5143 uint32_t regionCount, 5144 const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, 5145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5146 { 5147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5148 d.vkCmdCopyBuffer( 
static_cast<VkCommandBuffer>( m_commandBuffer ), 5149 static_cast<VkBuffer>( srcBuffer ), 5150 static_cast<VkBuffer>( dstBuffer ), 5151 regionCount, 5152 reinterpret_cast<const VkBufferCopy *>( pRegions ) ); 5153 } 5154 5155 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5156 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,Dispatch const & d) const5157 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5158 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5159 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, 5160 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5161 { 5162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5163 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5164 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" ); 5165 # endif 5166 5167 d.vkCmdCopyBuffer( m_commandBuffer, 5168 static_cast<VkBuffer>( srcBuffer ), 5169 static_cast<VkBuffer>( dstBuffer ), 5170 regions.size(), 5171 reinterpret_cast<const VkBufferCopy *>( regions.data() ) ); 5172 } 5173 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5174 5175 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,Dispatch const & d) const5176 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5177 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5178 VULKAN_HPP_NAMESPACE::Image dstImage, 5179 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5180 uint32_t regionCount, 5181 const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, 5182 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 5183 { 5184 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5185 d.vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5186 static_cast<VkImage>( srcImage ), 5187 static_cast<VkImageLayout>( srcImageLayout ), 5188 static_cast<VkImage>( dstImage ), 5189 static_cast<VkImageLayout>( dstImageLayout ), 5190 regionCount, 5191 reinterpret_cast<const VkImageCopy *>( pRegions ) ); 5192 } 5193 5194 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5195 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,Dispatch const & d) const5196 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5197 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5198 VULKAN_HPP_NAMESPACE::Image dstImage, 5199 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5200 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, 5201 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5202 { 5203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5204 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5205 VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" ); 5206 # endif 5207 5208 d.vkCmdCopyImage( m_commandBuffer, 5209 static_cast<VkImage>( srcImage ), 5210 static_cast<VkImageLayout>( srcImageLayout ), 5211 static_cast<VkImage>( dstImage ), 5212 static_cast<VkImageLayout>( dstImageLayout ), 5213 regions.size(), 5214 reinterpret_cast<const VkImageCopy *>( regions.data() ) ); 5215 } 5216 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5217 5218 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image 
dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const5219 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5220 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5221 VULKAN_HPP_NAMESPACE::Image dstImage, 5222 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5223 uint32_t regionCount, 5224 const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, 5225 VULKAN_HPP_NAMESPACE::Filter filter, 5226 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5227 { 5228 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5229 d.vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5230 static_cast<VkImage>( srcImage ), 5231 static_cast<VkImageLayout>( srcImageLayout ), 5232 static_cast<VkImage>( dstImage ), 5233 static_cast<VkImageLayout>( dstImageLayout ), 5234 regionCount, 5235 reinterpret_cast<const VkImageBlit *>( pRegions ), 5236 static_cast<VkFilter>( filter ) ); 5237 } 5238 5239 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5240 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const5241 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5242 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5243 VULKAN_HPP_NAMESPACE::Image dstImage, 5244 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5245 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, 5246 VULKAN_HPP_NAMESPACE::Filter filter, 5247 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5248 { 5249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5250 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5251 VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" ); 5252 # endif 5253 5254 d.vkCmdBlitImage( m_commandBuffer, 5255 static_cast<VkImage>( srcImage ), 5256 static_cast<VkImageLayout>( srcImageLayout ), 5257 static_cast<VkImage>( dstImage ), 5258 static_cast<VkImageLayout>( dstImageLayout ), 5259 regions.size(), 5260 reinterpret_cast<const VkImageBlit *>( regions.data() ), 5261 static_cast<VkFilter>( filter ) ); 5262 } 5263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5264 5265 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const5266 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5267 VULKAN_HPP_NAMESPACE::Image dstImage, 5268 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5269 uint32_t regionCount, 5270 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 5271 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5272 { 5273 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5274 d.vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5275 static_cast<VkBuffer>( srcBuffer ), 5276 static_cast<VkImage>( dstImage ), 5277 static_cast<VkImageLayout>( dstImageLayout ), 5278 regionCount, 5279 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 5280 } 5281 5282 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5283 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const5284 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( 
VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5285 VULKAN_HPP_NAMESPACE::Image dstImage, 5286 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5287 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 5288 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5289 { 5290 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5291 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5292 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" ); 5293 # endif 5294 5295 d.vkCmdCopyBufferToImage( m_commandBuffer, 5296 static_cast<VkBuffer>( srcBuffer ), 5297 static_cast<VkImage>( dstImage ), 5298 static_cast<VkImageLayout>( dstImageLayout ), 5299 regions.size(), 5300 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 5301 } 5302 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5303 5304 template <typename Dispatch> copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const5305 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 5306 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5307 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5308 uint32_t regionCount, 5309 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 5310 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5311 { 5312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5313 d.vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), 5314 static_cast<VkImage>( srcImage ), 5315 static_cast<VkImageLayout>( srcImageLayout ), 5316 static_cast<VkBuffer>( dstBuffer ), 5317 regionCount, 5318 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 5319 } 5320 5321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5322 template <typename Dispatch> 
copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const5323 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 5324 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5325 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5326 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 5327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5328 { 5329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5330 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5331 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" ); 5332 # endif 5333 5334 d.vkCmdCopyImageToBuffer( m_commandBuffer, 5335 static_cast<VkImage>( srcImage ), 5336 static_cast<VkImageLayout>( srcImageLayout ), 5337 static_cast<VkBuffer>( dstBuffer ), 5338 regions.size(), 5339 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 5340 } 5341 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5342 5343 template <typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize dataSize,const void * pData,Dispatch const & d) const5344 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5345 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5346 VULKAN_HPP_NAMESPACE::DeviceSize dataSize, 5347 const void * pData, 5348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5349 { 5350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5351 d.vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), 5352 static_cast<VkBuffer>( dstBuffer ), 5353 static_cast<VkDeviceSize>( dstOffset ), 5354 static_cast<VkDeviceSize>( dataSize ), 5355 pData ); 
5356 } 5357 5358 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5359 template <typename DataType, typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,Dispatch const & d) const5360 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5361 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5362 VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, 5363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5364 { 5365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5367 VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" ); 5368 # endif 5369 5370 d.vkCmdUpdateBuffer( m_commandBuffer, 5371 static_cast<VkBuffer>( dstBuffer ), 5372 static_cast<VkDeviceSize>( dstOffset ), 5373 data.size() * sizeof( DataType ), 5374 reinterpret_cast<const void *>( data.data() ) ); 5375 } 5376 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5377 5378 template <typename Dispatch> fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize size,uint32_t data,Dispatch const & d) const5379 VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5380 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5381 VULKAN_HPP_NAMESPACE::DeviceSize size, 5382 uint32_t data, 5383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5384 { 5385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5386 d.vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), 5387 static_cast<VkBuffer>( dstBuffer ), 5388 static_cast<VkDeviceSize>( dstOffset ), 5389 static_cast<VkDeviceSize>( size ), 5390 data ); 5391 } 5392 5393 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout 
imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const5394 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5395 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5396 const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, 5397 uint32_t rangeCount, 5398 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5400 { 5401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5402 d.vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5403 static_cast<VkImage>( image ), 5404 static_cast<VkImageLayout>( imageLayout ), 5405 reinterpret_cast<const VkClearColorValue *>( pColor ), 5406 rangeCount, 5407 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5408 } 5409 5410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5411 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue & color,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5412 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5413 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5414 const VULKAN_HPP_NAMESPACE::ClearColorValue & color, 5415 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5417 { 5418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5419 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5420 VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" ); 5421 # endif 5422 5423 d.vkCmdClearColorImage( m_commandBuffer, 5424 static_cast<VkImage>( image ), 5425 
static_cast<VkImageLayout>( imageLayout ), 5426 reinterpret_cast<const VkClearColorValue *>( &color ), 5427 ranges.size(), 5428 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5429 } 5430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5431 5432 template <typename Dispatch> clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const5433 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5434 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5435 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, 5436 uint32_t rangeCount, 5437 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5438 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5439 { 5440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5441 d.vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5442 static_cast<VkImage>( image ), 5443 static_cast<VkImageLayout>( imageLayout ), 5444 reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), 5445 rangeCount, 5446 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5447 } 5448 5449 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5450 template <typename Dispatch> 5451 VULKAN_HPP_INLINE void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5452 CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5453 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5454 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, 5455 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5456 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5457 { 5458 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5459 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5460 VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" ); 5461 # endif 5462 5463 d.vkCmdClearDepthStencilImage( m_commandBuffer, 5464 static_cast<VkImage>( image ), 5465 static_cast<VkImageLayout>( imageLayout ), 5466 reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), 5467 ranges.size(), 5468 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5469 } 5470 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5471 5472 template <typename Dispatch> clearAttachments(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,uint32_t rectCount,const VULKAN_HPP_NAMESPACE::ClearRect * pRects,Dispatch const & d) const5473 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, 5474 const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, 5475 uint32_t rectCount, 5476 const VULKAN_HPP_NAMESPACE::ClearRect * pRects, 5477 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5478 { 5479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5480 d.vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ), 5481 attachmentCount, 5482 reinterpret_cast<const VkClearAttachment *>( pAttachments ), 5483 rectCount, 5484 reinterpret_cast<const VkClearRect *>( pRects ) ); 5485 } 5486 5487 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5488 template <typename Dispatch> clearAttachments(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,Dispatch const & d) const5489 VULKAN_HPP_INLINE void 
CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, 5490 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, 5491 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5492 { 5493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5494 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5495 VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" ); 5496 # endif 5497 5498 d.vkCmdClearAttachments( m_commandBuffer, 5499 attachments.size(), 5500 reinterpret_cast<const VkClearAttachment *>( attachments.data() ), 5501 rects.size(), 5502 reinterpret_cast<const VkClearRect *>( rects.data() ) ); 5503 } 5504 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5505 5506 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,Dispatch const & d) const5507 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5508 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5509 VULKAN_HPP_NAMESPACE::Image dstImage, 5510 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5511 uint32_t regionCount, 5512 const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, 5513 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5514 { 5515 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5516 d.vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ), 5517 static_cast<VkImage>( srcImage ), 5518 static_cast<VkImageLayout>( srcImageLayout ), 5519 static_cast<VkImage>( dstImage ), 5520 static_cast<VkImageLayout>( dstImageLayout ), 5521 regionCount, 5522 reinterpret_cast<const VkImageResolve *>( pRegions ) ); 5523 } 5524 5525 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 5526 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,Dispatch const & d) const5527 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5528 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5529 VULKAN_HPP_NAMESPACE::Image dstImage, 5530 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5531 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, 5532 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5533 { 5534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5536 VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" ); 5537 # endif 5538 5539 d.vkCmdResolveImage( m_commandBuffer, 5540 static_cast<VkImage>( srcImage ), 5541 static_cast<VkImageLayout>( srcImageLayout ), 5542 static_cast<VkImage>( dstImage ), 5543 static_cast<VkImageLayout>( dstImageLayout ), 5544 regions.size(), 5545 reinterpret_cast<const VkImageResolve *>( regions.data() ) ); 5546 } 5547 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5548 5549 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const5550 VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, 5551 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 5552 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5553 { 5554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5555 d.vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 5556 } 5557 5558 
  // Wraps vkCmdResetEvent: records a command resetting the event to the unsignaled state after stageMask completes.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event              event,
                                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                    Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }

  // Wraps vkCmdWaitEvents (pointer/count form): waits on events and applies the given memory,
  // buffer and image barriers between srcStageMask and dstStageMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t                                          eventCount,
                                                    const VULKAN_HPP_NAMESPACE::Event *               pEvents,
                                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                                                    uint32_t                                          memoryBarrierCount,
                                                    const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                                                    uint32_t                                          bufferMemoryBarrierCount,
                                                    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                                    uint32_t                                          imageMemoryBarrierCount,
                                                    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ),
                       eventCount,
                       reinterpret_cast<const VkEvent *>( pEvents ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarrierCount,
                       reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                       bufferMemoryBarrierCount,
                       reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                       imageMemoryBarrierCount,
                       reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the event and barrier arrays are passed as ArrayProxy ranges.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &               events,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags                                                  srcStageMask,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags                                                  dstStageMask,
                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
                               Dispatch const &                                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" );
#  endif

    d.vkCmdWaitEvents( m_commandBuffer,
                       events.size(),
                       reinterpret_cast<const VkEvent *>( events.data() ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarriers.size(),
                       reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                       bufferMemoryBarriers.size(),
                       reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                       imageMemoryBarriers.size(),
                       reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkCmdPipelineBarrier (pointer/count form): records an execution/memory dependency
  // between srcStageMask and dstStageMask with the given barrier sets.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                                                         VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                                                         VULKAN_HPP_NAMESPACE::DependencyFlags             dependencyFlags,
                                                         uint32_t                                          memoryBarrierCount,
                                                         const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                                                         uint32_t                                          bufferMemoryBarrierCount,
                                                         const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                                         uint32_t                                          imageMemoryBarrierCount,
                                                         const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ),
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarrierCount,
                            reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                            bufferMemoryBarrierCount,
                            reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                            imageMemoryBarrierCount,
                            reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: barrier arrays are passed as ArrayProxy ranges.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags                                                  srcStageMask,
                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags                                                  dstStageMask,
                                    VULKAN_HPP_NAMESPACE::DependencyFlags                                                     dependencyFlags,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
                                    Dispatch const &                                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" );
#  endif

    d.vkCmdPipelineBarrier( m_commandBuffer,
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarriers.size(),
                            reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                            bufferMemoryBarriers.size(),
                            reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                            imageMemoryBarriers.size(),
                            reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkCmdBeginQuery: begins the given query slot with the given control flags.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
                                                    uint32_t                                query,
                                                    VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                    Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginQuery(
      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
  }

  // Wraps vkCmdEndQuery: ends the given query slot.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
  }

  // Wraps vkCmdResetQueryPool: resets a range of queries in the pool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                        uint32_t                        firstQuery,
                                                        uint32_t                        queryCount,
                                                        Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  // Wraps vkCmdWriteTimestamp: writes a timestamp into the query slot when pipelineStage completes.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                        VULKAN_HPP_NAMESPACE::QueryPool             queryPool,
                                                        uint32_t                                    query,
                                                        Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteTimestamp(
      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
  }

  // Wraps vkCmdCopyQueryPoolResults: copies results of a query range into dstBuffer at dstOffset with the given stride.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                              uint32_t                               firstQuery,
                                                              uint32_t                               queryCount,
                                                              VULKAN_HPP_NAMESPACE::Buffer           dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize       dstOffset,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                              Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                 static_cast<VkQueryPool>( queryPool ),
                                 firstQuery,
                                 queryCount,
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 static_cast<VkDeviceSize>( stride ),
                                 static_cast<VkQueryResultFlags>( flags ) );
  }

  // Wraps vkCmdPushConstants (pointer/size form): updates a byte range of push constants for the given stages.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout   layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                       uint32_t                               offset,
                                                       uint32_t                               size,
                                                       const void *                           pValues,
                                                       Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ),
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          size,
                          pValues );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the byte size is computed from the typed ArrayProxy of values.
  template <typename ValuesType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout                       layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags                     stageFlags,
                                                       uint32_t                                                   offset,
                                                       VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,
                                                       Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" );
#  endif

    d.vkCmdPushConstants( m_commandBuffer,
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          values.size() * sizeof( ValuesType ),
                          reinterpret_cast<const void *>( values.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkCmdBeginRenderPass (pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                                         VULKAN_HPP_NAMESPACE::SubpassContents             contents,
                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ),
                            reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                            static_cast<VkSubpassContents>( contents ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the begin-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
                                                         VULKAN_HPP_NAMESPACE::SubpassContents             contents,
                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" );
#  endif

    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkCmdNextSubpass: transitions to the next subpass of the current render pass.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
  }

  // Wraps vkCmdEndRenderPass: ends the current render pass instance.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) );
  }

  // Wraps vkCmdExecuteCommands (pointer/count form): executes secondary command buffers from a primary one.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t                                    commandBufferCount,
                                                         const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                         Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the secondary command buffers are passed as an ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                                         Dispatch const &                                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" );
#  endif

    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_VERSION_1_1 ===

  // Wraps vkEnumerateInstanceVersion (free function, pointer form): queries the instance-level Vulkan version.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the version, funnelling the VkResult through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires <VK_VERSION_1_1>" );
#  endif

    uint32_t                     apiVersion;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkBindBufferMemory2 (pointer/count form): binds device memory to multiple buffers in one call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t                                           bindInfoCount,
                                                                           const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                                                           Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindBufferMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: ArrayProxy of bind infos; result is checked and converted.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkBindImageMemory2 (pointer/count form): binds device memory to multiple images in one call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t                                          bindInfoCount,
                                                                          const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindImageMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: ArrayProxy of bind infos; result is checked and converted.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkGetDeviceGroupPeerMemoryFeatures (pointer form): queries peer-memory capabilities within a device group.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t                                       heapIndex,
                                                             uint32_t                                       localDeviceIndex,
                                                             uint32_t                                       remoteDeviceIndex,
                                                             VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                                             Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceGroupPeerMemoryFeatures(
      static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the feature flags by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures(
    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures &&
                       "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeatures(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );

    return peerMemoryFeatures;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkCmdSetDeviceMask: sets which physical devices of the group execute subsequent commands.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
  }

  // Wraps vkCmdDispatchBase: dispatches compute work with a non-zero base workgroup.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t
                                                               baseGroupX,
                                                      uint32_t baseGroupY,
                                                      uint32_t baseGroupZ,
                                                      uint32_t groupCountX,
                                                      uint32_t groupCountY,
                                                      uint32_t groupCountZ,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }

  // Wraps vkEnumeratePhysicalDeviceGroups (pointer form): queries/fills the array of device groups.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::enumeratePhysicalDeviceGroups( uint32_t *                                            pPhysicalDeviceGroupCount,
                                             VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ),
                                                                   pPhysicalDeviceGroupCount,
                                                                   reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns all device groups in a vector (default-constructed allocator).
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
                       "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t                                                                                                 physicalDeviceGroupCount;
    VULKAN_HPP_NAMESPACE::Result                                                                             result;
    // Standard Vulkan two-call enumeration: query the count, then fill; loop while the
    // implementation reports eIncomplete, since the count can change between the two calls.
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    // Shrink in case the final count came back smaller than the capacity we allocated.
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) );
  }

  // Enhanced-mode overload: same enumeration, but the result vector uses a caller-provided allocator.
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
                       "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t                     physicalDeviceGroupCount;
    VULKAN_HPP_NAMESPACE::Result result;
    // Two-call enumeration with eIncomplete retry, as above.
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkGetImageMemoryRequirements2 (pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
                                                              Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the memory requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 &&
                       "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  // Enhanced-mode overload: fills the MemoryRequirements2 link of a caller-specified StructureChain,
  // so extension structures chained behind it get populated as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 &&
                       "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkGetBufferMemoryRequirements2 (pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                                               VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                 pMemoryRequirements,
                                                               Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the memory requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 &&
                       "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  template <typename X, typename Y, typename...
Z, typename Dispatch> 6140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const6141 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6142 { 6143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6144 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6145 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && 6146 "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6147 # endif 6148 6149 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6150 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 6151 d.vkGetBufferMemoryRequirements2( 6152 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 6153 6154 return structureChain; 6155 } 6156 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6157 6158 template <typename Dispatch> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const6159 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 6160 uint32_t * pSparseMemoryRequirementCount, 6161 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 6162 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6163 { 6164 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6165 d.vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ), 6166 
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 6167 pSparseMemoryRequirementCount, 6168 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 6169 } 6170 6171 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6172 template <typename SparseImageMemoryRequirements2Allocator, 6173 typename Dispatch, 6174 typename std::enable_if< 6175 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 6176 int>::type> 6177 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const6178 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 6179 { 6180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6182 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6183 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6184 # endif 6185 6186 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 6187 uint32_t sparseMemoryRequirementCount; 6188 d.vkGetImageSparseMemoryRequirements2( 6189 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6190 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6191 d.vkGetImageSparseMemoryRequirements2( m_device, 6192 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6193 &sparseMemoryRequirementCount, 6194 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 6195 6196 
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6197 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6198 { 6199 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6200 } 6201 return sparseMemoryRequirements; 6202 } 6203 6204 template <typename SparseImageMemoryRequirements2Allocator, 6205 typename Dispatch, 6206 typename std::enable_if< 6207 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 6208 int>::type> 6209 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const6210 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 6211 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 6212 Dispatch const & d ) const 6213 { 6214 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6215 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6216 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6217 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6218 # endif 6219 6220 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 6221 sparseImageMemoryRequirements2Allocator ); 6222 uint32_t sparseMemoryRequirementCount; 6223 d.vkGetImageSparseMemoryRequirements2( 6224 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6225 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6226 
d.vkGetImageSparseMemoryRequirements2( m_device, 6227 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6228 &sparseMemoryRequirementCount, 6229 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 6230 6231 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6232 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6233 { 6234 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6235 } 6236 return sparseMemoryRequirements; 6237 } 6238 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6239 6240 template <typename Dispatch> getFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const6241 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6242 { 6243 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6244 d.vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 6245 } 6246 6247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6248 template <typename Dispatch> 6249 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const & d) const6250 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6251 { 6252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6254 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6255 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6256 # endif 6257 6258 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 6259 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6260 6261 return features; 6262 } 6263 6264 template 
<typename X, typename Y, typename... Z, typename Dispatch> 6265 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2(Dispatch const & d) const6266 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6267 { 6268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6270 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6271 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6272 # endif 6273 6274 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6275 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 6276 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6277 6278 return structureChain; 6279 } 6280 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6281 6282 template <typename Dispatch> getProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const6283 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 6284 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6285 { 6286 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6287 d.vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 6288 } 6289 6290 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6291 template <typename Dispatch> 6292 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const & d) const6293 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6294 { 6295 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6296 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6297 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 6298 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6299 # endif 6300 6301 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 6302 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6303 6304 return properties; 6305 } 6306 6307 template <typename X, typename Y, typename... Z, typename Dispatch> 6308 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2(Dispatch const & d) const6309 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6310 { 6311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6312 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6313 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 6314 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6315 # endif 6316 6317 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6318 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 6319 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6320 6321 return structureChain; 6322 } 6323 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6324 6325 template <typename Dispatch> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const6326 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, 6327 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 6328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6329 { 6330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 6331 d.vkGetPhysicalDeviceFormatProperties2( 6332 static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 6333 } 6334 6335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6336 template <typename Dispatch> 6337 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6338 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6339 { 6340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6341 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6342 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6343 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6344 # endif 6345 6346 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 6347 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6348 6349 return formatProperties; 6350 } 6351 6352 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6353 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6354 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6355 { 6356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6357 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6358 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6359 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6360 # endif 6361 6362 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6363 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 6364 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6365 6366 return structureChain; 6367 } 6368 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6369 6370 template <typename Dispatch> 6371 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const6372 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 6373 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 6374 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6375 { 6376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6377 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), 6378 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 6379 
reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 6380 } 6381 6382 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6383 template <typename Dispatch> 6384 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6385 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6386 { 6387 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6388 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6389 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6390 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6391 # endif 6392 6393 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 6394 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6395 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6396 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6397 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6398 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6399 6400 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 6401 } 6402 6403 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6405 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6406 { 6407 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6408 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6409 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6410 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6411 # endif 6412 6413 StructureChain<X, Y, Z...> structureChain; 6414 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 6415 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6416 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6417 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6418 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6419 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6420 6421 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 6422 } 6423 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6424 6425 template <typename Dispatch> getQueueFamilyProperties2(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const6426 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, 6427 VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 6428 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 6429 { 6430 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6431 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6432 static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 6433 } 6434 6435 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6436 template < 6437 typename QueueFamilyProperties2Allocator, 6438 typename Dispatch, 6439 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 6440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(Dispatch const & d) const6441 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 6442 { 6443 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6444 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6445 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6446 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6447 # endif 6448 6449 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 6450 uint32_t queueFamilyPropertyCount; 6451 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6452 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6453 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6454 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6455 6456 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6457 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6458 { 6459 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6460 } 6461 return 
queueFamilyProperties; 6462 } 6463 6464 template < 6465 typename QueueFamilyProperties2Allocator, 6466 typename Dispatch, 6467 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 6468 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const6469 PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 6470 { 6471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6472 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6473 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6474 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6475 # endif 6476 6477 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 6478 uint32_t queueFamilyPropertyCount; 6479 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6480 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6481 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6482 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6483 6484 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6485 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6486 { 6487 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6488 } 6489 return queueFamilyProperties; 6490 } 6491 6492 template <typename StructureChain, 6493 typename StructureChainAllocator, 6494 typename Dispatch, 6495 typename 
std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 6496 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(Dispatch const & d) const6497 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 6498 { 6499 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6500 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6501 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6502 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6503 # endif 6504 6505 std::vector<StructureChain, StructureChainAllocator> structureChains; 6506 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 6507 uint32_t queueFamilyPropertyCount; 6508 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6509 structureChains.resize( queueFamilyPropertyCount ); 6510 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6511 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6512 { 6513 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 6514 } 6515 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6516 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6517 6518 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6519 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6520 { 6521 structureChains.resize( queueFamilyPropertyCount ); 6522 } 6523 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6524 { 6525 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 6526 } 6527 return structureChains; 6528 } 6529 6530 template <typename StructureChain, 6531 
typename StructureChainAllocator, 6532 typename Dispatch, 6533 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 6534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(StructureChainAllocator & structureChainAllocator,Dispatch const & d) const6535 PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 6536 { 6537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6539 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6540 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6541 # endif 6542 6543 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 6544 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 6545 uint32_t queueFamilyPropertyCount; 6546 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6547 structureChains.resize( queueFamilyPropertyCount ); 6548 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6549 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6550 { 6551 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 6552 } 6553 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6554 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6555 6556 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6557 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6558 { 6559 structureChains.resize( queueFamilyPropertyCount ); 6560 } 6561 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6562 { 6563 
structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 6564 } 6565 return structureChains; 6566 } 6567 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6568 6569 template <typename Dispatch> getMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const6570 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 6571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6572 { 6573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6574 d.vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), 6575 reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 6576 } 6577 6578 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6579 template <typename Dispatch> 6580 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const & d) const6581 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6582 { 6583 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6584 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6585 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && 6586 "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6587 # endif 6588 6589 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 6590 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 6591 6592 return memoryProperties; 6593 } 6594 6595 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const & d) const6597 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6598 { 6599 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6600 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6601 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && 6602 "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6603 # endif 6604 6605 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6606 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 6607 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 6608 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 6609 6610 return structureChain; 6611 } 6612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6613 6614 template <typename Dispatch> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const6615 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 6616 uint32_t * pPropertyCount, 6617 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 6618 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6619 { 6620 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6621 d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), 6622 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 6623 pPropertyCount, 6624 reinterpret_cast<VkSparseImageFormatProperties2 *>( 
pProperties ) ); 6625 } 6626 6627 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6628 template < 6629 typename SparseImageFormatProperties2Allocator, 6630 typename Dispatch, 6631 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 6632 int>::type> 6633 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const6634 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const 6635 { 6636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6637 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6638 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && 6639 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6640 # endif 6641 6642 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 6643 uint32_t propertyCount; 6644 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 6645 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 6646 properties.resize( propertyCount ); 6647 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 6648 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 6649 &propertyCount, 6650 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 6651 6652 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 6653 if ( propertyCount < properties.size() ) 6654 { 6655 properties.resize( propertyCount ); 6656 } 6657 return properties; 6658 } 6659 
6660 template < 6661 typename SparseImageFormatProperties2Allocator, 6662 typename Dispatch, 6663 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 6664 int>::type> 6665 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const6666 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 6667 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 6668 Dispatch const & d ) const 6669 { 6670 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6671 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6672 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && 6673 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6674 # endif 6675 6676 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 6677 uint32_t propertyCount; 6678 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 6679 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 6680 properties.resize( propertyCount ); 6681 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 6682 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 6683 &propertyCount, 6684 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 6685 6686 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 6687 
if ( propertyCount < properties.size() ) 6688 { 6689 properties.resize( propertyCount ); 6690 } 6691 return properties; 6692 } 6693 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6694 6695 template <typename Dispatch> trimCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const6696 VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 6697 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 6698 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6699 { 6700 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6701 d.vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 6702 } 6703 6704 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const6705 VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, 6706 VULKAN_HPP_NAMESPACE::Queue * pQueue, 6707 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6708 { 6709 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6710 d.vkGetDeviceQueue2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); 6711 } 6712 6713 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6714 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,Dispatch const & d) const6715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, 6716 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6717 { 6718 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6719 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6720 VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function 
<vkGetDeviceQueue2> requires <VK_VERSION_1_1>" ); 6721 # endif 6722 6723 VULKAN_HPP_NAMESPACE::Queue queue; 6724 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) ); 6725 6726 return queue; 6727 } 6728 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6729 6730 template <typename Dispatch> 6731 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,Dispatch const & d) const6732 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, 6733 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6734 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, 6735 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6736 { 6737 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6738 return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( m_device ), 6739 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), 6740 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6741 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); 6742 } 6743 6744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6745 template <typename Dispatch> 6746 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6747 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 6748 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6749 Dispatch 
const & d ) const 6750 { 6751 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6752 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6753 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && 6754 "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6755 # endif 6756 6757 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 6758 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 6759 m_device, 6760 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 6761 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6762 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 6763 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); 6764 6765 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); 6766 } 6767 6768 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6769 template <typename Dispatch> 6770 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6771 Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 6772 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6773 Dispatch const & d ) const 6774 { 6775 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6776 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6777 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && 6778 "Function <vkCreateSamplerYcbcrConversion> requires 
<VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6779 # endif 6780 6781 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 6782 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 6783 m_device, 6784 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 6785 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6786 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 6787 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); 6788 6789 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 6790 result, 6791 UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6792 } 6793 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6794 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6795 6796 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6797 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6798 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6799 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6800 { 6801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6802 d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), 6803 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6804 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6805 } 6806 6807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6808 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6809 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6810 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6812 { 6813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6815 VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && 6816 "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6817 # endif 6818 6819 d.vkDestroySamplerYcbcrConversion( 6820 m_device, 6821 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6822 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6823 } 6824 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6825 6826 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6827 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6828 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6829 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6830 { 6831 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6832 d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), 6833 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6834 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6835 } 6836 6837 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6838 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6839 VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6840 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6842 { 6843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6844 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6845 VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && 6846 "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6847 # endif 6848 6849 d.vkDestroySamplerYcbcrConversion( 6850 m_device, 6851 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6852 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6853 } 6854 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6855 6856 template <typename Dispatch> 6857 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const6858 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 6859 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6860 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 6861 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6862 { 6863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6864 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), 6865 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 6866 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6867 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 6868 } 6869 6870 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6871 template 
<typename Dispatch> 6872 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6873 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6874 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6875 Dispatch const & d ) const 6876 { 6877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6879 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && 6880 "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6881 # endif 6882 6883 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6884 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6885 m_device, 6886 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6887 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6888 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6889 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); 6890 6891 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); 6892 } 6893 6894 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6895 template <typename Dispatch> 6896 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique(const 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6897 Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6898 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6899 Dispatch const & d ) const 6900 { 6901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6902 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6903 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && 6904 "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6905 # endif 6906 6907 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6908 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6909 m_device, 6910 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6911 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6912 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6913 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); 6914 6915 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 6916 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 6917 descriptorUpdateTemplate, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6918 } 6919 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6920 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6921 6922 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6923 VULKAN_HPP_INLINE void 
Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6924 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6926 { 6927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6928 d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), 6929 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6930 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6931 } 6932 6933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6934 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6935 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6936 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6937 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6938 { 6939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6940 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6941 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && 6942 "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6943 # endif 6944 6945 d.vkDestroyDescriptorUpdateTemplate( 6946 m_device, 6947 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6948 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6949 } 6950 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6951 6952 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6953 VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6954 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6955 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6956 { 6957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6958 d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), 6959 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6960 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6961 } 6962 6963 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6964 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6965 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6966 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6967 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6968 { 6969 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6970 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6971 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && 6972 "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6973 # endif 6974 6975 d.vkDestroyDescriptorUpdateTemplate( 6976 m_device, 6977 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6978 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6979 } 6980 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6981 6982 template <typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const6983 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet 
descriptorSet, 6984 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6985 const void * pData, 6986 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6987 { 6988 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6989 d.vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ), 6990 static_cast<VkDescriptorSet>( descriptorSet ), 6991 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6992 pData ); 6993 } 6994 6995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6996 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const6997 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6998 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6999 DataType const & data, 7000 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7001 { 7002 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7003 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7004 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && 7005 "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 7006 # endif 7007 7008 d.vkUpdateDescriptorSetWithTemplate( m_device, 7009 static_cast<VkDescriptorSet>( descriptorSet ), 7010 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 7011 reinterpret_cast<const void *>( &data ) ); 7012 } 7013 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7014 7015 template <typename Dispatch> getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const7016 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( 
const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 7017 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 7018 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7019 { 7020 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7021 d.vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 7022 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 7023 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 7024 } 7025 7026 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7027 template <typename Dispatch> 7028 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const7029 PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 7030 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7031 { 7032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7033 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7034 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && 7035 "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 7036 # endif 7037 7038 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 7039 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 7040 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 7041 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 7042 7043 return externalBufferProperties; 7044 } 7045 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7046 7047 template <typename Dispatch> getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * 
pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const7048 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 7049 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 7050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7051 { 7052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7053 d.vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 7054 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 7055 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 7056 } 7057 7058 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7059 template <typename Dispatch> 7060 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const7061 PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 7062 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7063 { 7064 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7065 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7066 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && 7067 "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 7068 # endif 7069 7070 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 7071 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 7072 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 7073 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 7074 7075 return externalFenceProperties; 7076 } 7077 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7078 7079 template <typename Dispatch> 7080 VULKAN_HPP_INLINE void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const7081 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 7082 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 7083 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7084 { 7085 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7086 d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), 7087 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 7088 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 7089 } 7090 7091 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7092 template <typename Dispatch> 7093 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const7094 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 7095 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7096 { 7097 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7098 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7099 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && 7100 "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 7101 # endif 7102 7103 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 7104 
d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 7105 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 7106 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 7107 7108 return externalSemaphoreProperties; 7109 } 7110 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7111 7112 template <typename Dispatch> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const7113 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 7114 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 7115 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7116 { 7117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7118 d.vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ), 7119 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 7120 reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 7121 } 7122 7123 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7124 template <typename Dispatch> 7125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const7126 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 7127 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7128 { 7129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7131 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 7132 # endif 7133 7134 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 7135 
d.vkGetDescriptorSetLayoutSupport( 7136 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 7137 7138 return support; 7139 } 7140 7141 template <typename X, typename Y, typename... Z, typename Dispatch> 7142 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const7143 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 7144 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7145 { 7146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7147 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7148 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 7149 # endif 7150 7151 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 7152 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 7153 d.vkGetDescriptorSetLayoutSupport( 7154 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 7155 7156 return structureChain; 7157 } 7158 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7159 7160 //=== VK_VERSION_1_2 === 7161 7162 template <typename Dispatch> drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7163 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7164 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7165 VULKAN_HPP_NAMESPACE::Buffer 
countBuffer, 7166 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7167 uint32_t maxDrawCount, 7168 uint32_t stride, 7169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7170 { 7171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7172 d.vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), 7173 static_cast<VkBuffer>( buffer ), 7174 static_cast<VkDeviceSize>( offset ), 7175 static_cast<VkBuffer>( countBuffer ), 7176 static_cast<VkDeviceSize>( countBufferOffset ), 7177 maxDrawCount, 7178 stride ); 7179 } 7180 7181 template <typename Dispatch> drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7182 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7183 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7184 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 7185 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7186 uint32_t maxDrawCount, 7187 uint32_t stride, 7188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7189 { 7190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7191 d.vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), 7192 static_cast<VkBuffer>( buffer ), 7193 static_cast<VkDeviceSize>( offset ), 7194 static_cast<VkBuffer>( countBuffer ), 7195 static_cast<VkDeviceSize>( countBufferOffset ), 7196 maxDrawCount, 7197 stride ); 7198 } 7199 7200 template <typename Dispatch> createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const7201 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 7202 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7203 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 7204 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7205 { 7206 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7207 return static_cast<Result>( d.vkCreateRenderPass2( static_cast<VkDevice>( m_device ), 7208 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 7209 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7210 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 7211 } 7212 7213 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7214 template <typename Dispatch> 7215 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7216 Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7217 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7218 Dispatch const & d ) const 7219 { 7220 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7221 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7222 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7223 # endif 7224 7225 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7226 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7227 d.vkCreateRenderPass2( m_device, 7228 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7229 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7230 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7231 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); 7232 7233 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 7234 } 7235 7236 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7237 template <typename Dispatch> 7238 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7239 Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7240 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7241 Dispatch const & d ) const 7242 { 7243 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7244 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7245 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7246 # endif 7247 7248 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7249 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7250 d.vkCreateRenderPass2( m_device, 7251 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7252 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7253 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7254 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); 7255 7256 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7257 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7258 } 7259 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 7260 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7261 7262 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * 
pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const7263 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 7264 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7265 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7266 { 7267 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7268 d.vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), 7269 reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), 7270 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 7271 } 7272 7273 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7274 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const7275 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 7276 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7277 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7278 { 7279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7280 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7281 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7282 # endif 7283 7284 d.vkCmdBeginRenderPass2( 7285 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 7286 } 7287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7288 7289 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7290 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const 
VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7291 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7292 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7293 { 7294 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7295 d.vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ), 7296 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), 7297 reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 7298 } 7299 7300 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7301 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7302 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7303 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7304 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7305 { 7306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7307 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7308 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7309 # endif 7310 7311 d.vkCmdNextSubpass2( 7312 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7313 } 7314 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7315 7316 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7317 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7319 { 7320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7321 d.vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const 
VkSubpassEndInfo *>( pSubpassEndInfo ) ); 7322 } 7323 7324 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7325 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7326 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7328 { 7329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7330 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7331 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7332 # endif 7333 7334 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7335 } 7336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7337 7338 template <typename Dispatch> 7339 VULKAN_HPP_INLINE void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const7340 Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7341 { 7342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7343 d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 7344 } 7345 7346 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const7347 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7348 uint64_t * pValue, 7349 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7350 { 7351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7352 return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), 
pValue ) ); 7353 } 7354 7355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7356 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const7357 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7358 Dispatch const & d ) const 7359 { 7360 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7361 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7362 VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7363 # endif 7364 7365 uint64_t value; 7366 VULKAN_HPP_NAMESPACE::Result result = 7367 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 7368 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); 7369 7370 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 7371 } 7372 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7373 7374 template <typename Dispatch> waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const7375 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 7376 uint64_t timeout, 7377 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7378 { 7379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7380 return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 7381 } 7382 7383 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7384 template <typename Dispatch> 7385 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphores(const 
VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const7386 Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 7387 { 7388 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7389 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7390 VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7391 # endif 7392 7393 VULKAN_HPP_NAMESPACE::Result result = 7394 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 7395 VULKAN_HPP_NAMESPACE::detail::resultCheck( 7396 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 7397 7398 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 7399 } 7400 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7401 7402 template <typename Dispatch> signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const7403 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 7404 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7405 { 7406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7407 return static_cast<Result>( d.vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 7408 } 7409 7410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7411 template <typename Dispatch> 7412 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const7413 Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch 
const & d ) const 7414 { 7415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7416 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7417 VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7418 # endif 7419 7420 VULKAN_HPP_NAMESPACE::Result result = 7421 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 7422 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); 7423 7424 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7425 } 7426 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7427 7428 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7429 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7430 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7431 { 7432 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7433 return static_cast<DeviceAddress>( 7434 d.vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 7435 } 7436 7437 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7438 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7439 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7440 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7441 { 7442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7443 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7444 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress && 7445 "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or 
<VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7446 # endif 7447 7448 VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7449 7450 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 7451 } 7452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7453 7454 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7455 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7456 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7457 { 7458 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7459 return d.vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 7460 } 7461 7462 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7463 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7464 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7465 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7466 { 7467 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7468 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7469 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && 7470 "Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7471 # endif 7472 7473 uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7474 7475 return result; 7476 } 7477 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7478 7479 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) 
const7480 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 7481 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7482 { 7483 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7484 return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), 7485 reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 7486 } 7487 7488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7489 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const7490 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 7491 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7492 { 7493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7494 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7495 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && 7496 "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7497 # endif 7498 7499 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 7500 7501 return result; 7502 } 7503 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7504 7505 //=== VK_VERSION_1_3 === 7506 7507 template <typename Dispatch> getToolProperties(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const7508 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, 7509 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 7510 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7511 { 7512 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7513 return static_cast<Result>( 
d.vkGetPhysicalDeviceToolProperties( 7514 static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 7515 } 7516 7517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7518 template < 7519 typename PhysicalDeviceToolPropertiesAllocator, 7520 typename Dispatch, 7521 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 7522 int>::type> 7523 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7524 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(Dispatch const & d) const7525 PhysicalDevice::getToolProperties( Dispatch const & d ) const 7526 { 7527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7528 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7529 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && 7530 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7531 # endif 7532 7533 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 7534 uint32_t toolCount; 7535 VULKAN_HPP_NAMESPACE::Result result; 7536 do 7537 { 7538 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7539 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7540 { 7541 toolProperties.resize( toolCount ); 7542 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7543 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7544 } 7545 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7546 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getToolProperties" ); 7547 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7548 if ( toolCount < toolProperties.size() ) 7549 { 7550 toolProperties.resize( toolCount ); 7551 } 7552 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7553 } 7554 7555 template < 7556 typename PhysicalDeviceToolPropertiesAllocator, 7557 typename Dispatch, 7558 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 7559 int>::type> 7560 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7561 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const7562 PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 7563 { 7564 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7565 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7566 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && 7567 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7568 # endif 7569 7570 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 7571 physicalDeviceToolPropertiesAllocator ); 7572 uint32_t toolCount; 7573 VULKAN_HPP_NAMESPACE::Result result; 7574 do 7575 { 7576 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7577 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7578 { 7579 toolProperties.resize( toolCount ); 7580 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7581 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, 
&toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7582 } 7583 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7584 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 7585 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7586 if ( toolCount < toolProperties.size() ) 7587 { 7588 toolProperties.resize( toolCount ); 7589 } 7590 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7591 } 7592 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7593 7594 template <typename Dispatch> createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const7595 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 7596 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7597 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 7598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7599 { 7600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7601 return static_cast<Result>( d.vkCreatePrivateDataSlot( static_cast<VkDevice>( m_device ), 7602 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 7603 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7604 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 7605 } 7606 7607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7608 template <typename Dispatch> 7609 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) 
const7610 Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7611 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7612 Dispatch const & d ) const 7613 { 7614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7615 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7616 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7617 # endif 7618 7619 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7620 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7621 d.vkCreatePrivateDataSlot( m_device, 7622 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7623 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7624 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7625 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); 7626 7627 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); 7628 } 7629 7630 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7631 template <typename Dispatch> 7632 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7633 Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7634 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7635 Dispatch const & d ) const 7636 { 7637 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7638 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7639 VULKAN_HPP_ASSERT( 
d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7640 # endif 7641 7642 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7643 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7644 d.vkCreatePrivateDataSlot( m_device, 7645 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7646 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7647 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7648 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); 7649 7650 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7651 result, 7652 UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7653 } 7654 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 7655 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7656 7657 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7658 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7659 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7660 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7661 { 7662 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7663 d.vkDestroyPrivateDataSlot( 7664 static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7665 } 7666 7667 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7668 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator,Dispatch const & d) const7669 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7670 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7671 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7672 { 7673 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7674 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7675 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7676 # endif 7677 7678 d.vkDestroyPrivateDataSlot( 7679 m_device, 7680 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7681 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7682 } 7683 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7684 7685 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7686 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7687 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7688 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7689 { 7690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7691 d.vkDestroyPrivateDataSlot( 7692 static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7693 } 7694 7695 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7696 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7697 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7698 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7699 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 7700 { 7701 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7702 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7703 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7704 # endif 7705 7706 d.vkDestroyPrivateDataSlot( 7707 m_device, 7708 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7709 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7710 } 7711 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7712 7713 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 7714 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7716 uint64_t objectHandle, 7717 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7718 uint64_t data, 7719 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7720 { 7721 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7722 return static_cast<Result>( d.vkSetPrivateData( 7723 static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7724 } 7725 #else 7726 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7727 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7728 uint64_t objectHandle, 7729 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7730 uint64_t data, 7731 Dispatch const & d ) const 7732 { 7733 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7734 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7735 VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7736 # endif 7737 7738 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7739 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7740 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); 7741 7742 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7743 } 7744 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7745 7746 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const7747 VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7748 uint64_t objectHandle, 7749 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7750 uint64_t * pData, 7751 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7752 { 7753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7754 d.vkGetPrivateData( 7755 static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 7756 } 7757 7758 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7759 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const7760 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7761 uint64_t objectHandle, 7762 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7763 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7764 { 7765 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7767 VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7768 # endif 7769 7770 uint64_t data; 7771 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 7772 7773 return data; 7774 } 7775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7776 7777 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7778 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7779 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7781 { 7782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7783 d.vkCmdSetEvent2( 7784 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 7785 } 7786 7787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7788 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7789 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7790 const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7791 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7792 { 7793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7795 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7796 # endif 7797 7798 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7799 } 7800 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7801 7802 template <typename Dispatch> resetEvent2(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const7803 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, 7804 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 7805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7806 { 7807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7808 d.vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 7809 } 7810 7811 template <typename Dispatch> waitEvents2(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const7812 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, 7813 const VULKAN_HPP_NAMESPACE::Event * pEvents, 7814 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 7815 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7816 { 7817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7818 d.vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ), 7819 eventCount, 7820 reinterpret_cast<const VkEvent *>( pEvents ), 7821 reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 7822 } 7823 7824 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7825 template <typename Dispatch> waitEvents2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const7826 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 7827 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 7828 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7829 { 7830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7832 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7833 # endif 7834 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7835 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 7836 # else 7837 if ( events.size() != dependencyInfos.size() ) 7838 { 7839 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); 7840 } 7841 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 7842 7843 d.vkCmdWaitEvents2( m_commandBuffer, 7844 events.size(), 7845 reinterpret_cast<const VkEvent *>( events.data() ), 7846 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 7847 } 7848 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7849 7850 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7851 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7852 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7853 { 7854 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7855 d.vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 7856 } 7857 7858 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7859 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7860 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7861 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7862 { 7863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7864 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7865 VULKAN_HPP_ASSERT( 
d.vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7866 # endif 7867 7868 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7869 } 7870 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7871 7872 template <typename Dispatch> writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const7873 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 7874 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 7875 uint32_t query, 7876 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7877 { 7878 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7879 d.vkCmdWriteTimestamp2( 7880 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 7881 } 7882 7883 template <typename Dispatch> submit2(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7884 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, 7885 const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 7886 VULKAN_HPP_NAMESPACE::Fence fence, 7887 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7888 { 7889 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7890 return static_cast<Result>( 7891 d.vkQueueSubmit2( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 7892 } 7893 7894 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7895 template <typename Dispatch> submit2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7896 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType<void>::type Queue::submit2( 7897 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 7898 { 7899 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7900 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7901 VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7902 # endif 7903 7904 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7905 d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 7906 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); 7907 7908 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7909 } 7910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7911 7912 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const7913 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 7914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7915 { 7916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7917 d.vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 7918 } 7919 7920 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7921 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const7922 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 7923 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7924 { 7925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7926 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7927 VULKAN_HPP_ASSERT( 
d.vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7928 # endif 7929 7930 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 7931 } 7932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7933 7934 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const7935 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7936 { 7937 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7938 d.vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 7939 } 7940 7941 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7942 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const7943 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7944 { 7945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7946 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7947 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function <vkCmdCopyImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7948 # endif 7949 7950 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 7951 } 7952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7953 7954 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const7955 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 7956 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7957 { 7958 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7959 d.vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 7960 } 7961 7962 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7963 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const7964 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 7965 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7966 { 7967 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7968 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7969 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7970 # endif 7971 7972 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 7973 } 7974 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7975 7976 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const7977 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 7978 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7979 { 7980 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7981 d.vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 7982 } 7983 7984 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7985 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const7986 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & 
copyImageToBufferInfo, 7987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7988 { 7989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7990 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7991 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7992 # endif 7993 7994 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 7995 } 7996 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7997 7998 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const7999 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8000 { 8001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8002 d.vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 8003 } 8004 8005 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8006 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const8007 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8008 { 8009 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8010 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8011 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 8012 # endif 8013 8014 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 8015 } 8016 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8017 8018 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) 
const8019 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 8020 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8021 { 8022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8023 d.vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 8024 } 8025 8026 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8027 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const8028 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 8029 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8030 { 8031 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8032 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8033 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 8034 # endif 8035 8036 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 8037 } 8038 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8039 8040 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const8041 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 8042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8043 { 8044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8045 d.vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 8046 } 8047 8048 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8049 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const8050 VULKAN_HPP_INLINE void 
CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 8051 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8052 { 8053 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8054 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8055 VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 8056 # endif 8057 8058 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 8059 } 8060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8061 8062 template <typename Dispatch> endRendering(Dispatch const & d) const8063 VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8064 { 8065 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8066 d.vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 8067 } 8068 8069 template <typename Dispatch> setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const8070 VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8071 { 8072 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8073 d.vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); 8074 } 8075 8076 template <typename Dispatch> setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const8077 VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8078 { 8079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8080 d.vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); 8081 } 8082 8083 template <typename Dispatch> setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology 
primitiveTopology,Dispatch const & d) const8084 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 8085 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8086 { 8087 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8088 d.vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 8089 } 8090 8091 template <typename Dispatch> setViewportWithCount(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const8092 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, 8093 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 8094 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8095 { 8096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8097 d.vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 8098 } 8099 8100 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8101 template <typename Dispatch> setViewportWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const8102 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 8103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8104 { 8105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8106 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8107 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && 8108 "Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 8109 # endif 8110 8111 d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 8112 } 8113 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8114 8115 
template <typename Dispatch> 8116 VULKAN_HPP_INLINE void setScissorWithCount(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const8117 CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8118 { 8119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8120 d.vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 8121 } 8122 8123 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8124 template <typename Dispatch> setScissorWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const8125 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 8126 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8127 { 8128 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8129 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8130 VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && 8131 "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 8132 # endif 8133 8134 d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 8135 } 8136 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8137 8138 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const8139 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 8140 uint32_t bindingCount, 8141 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 8142 const 
VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 8143 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 8144 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 8145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8146 { 8147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8148 d.vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ), 8149 firstBinding, 8150 bindingCount, 8151 reinterpret_cast<const VkBuffer *>( pBuffers ), 8152 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 8153 reinterpret_cast<const VkDeviceSize *>( pSizes ), 8154 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 8155 } 8156 8157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8158 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const8159 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 8160 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 8161 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 8162 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 8163 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 8164 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 8165 { 8166 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8167 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8168 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && 8169 "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 8170 # endif 
8171 # ifdef VULKAN_HPP_NO_EXCEPTIONS 8172 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 8173 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 8174 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 8175 # else 8176 if ( buffers.size() != offsets.size() ) 8177 { 8178 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); 8179 } 8180 if ( !sizes.empty() && buffers.size() != sizes.size() ) 8181 { 8182 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); 8183 } 8184 if ( !strides.empty() && buffers.size() != strides.size() ) 8185 { 8186 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); 8187 } 8188 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 8189 8190 d.vkCmdBindVertexBuffers2( m_commandBuffer, 8191 firstBinding, 8192 buffers.size(), 8193 reinterpret_cast<const VkBuffer *>( buffers.data() ), 8194 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 8195 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 8196 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 8197 } 8198 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8199 8200 template <typename Dispatch> setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const8201 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8202 { 8203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8204 d.vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); 8205 } 8206 8207 template <typename Dispatch> setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const8208 VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( 
VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8209 { 8210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8211 d.vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); 8212 } 8213 8214 template <typename Dispatch> setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const8215 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8216 { 8217 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8218 d.vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); 8219 } 8220 8221 template <typename Dispatch> setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const8222 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 8223 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8224 { 8225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8226 d.vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); 8227 } 8228 8229 template <typename Dispatch> setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const8230 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8231 { 8232 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8233 d.vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); 8234 } 8235 8236 template <typename Dispatch> setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp 
failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const8237 VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 8238 VULKAN_HPP_NAMESPACE::StencilOp failOp, 8239 VULKAN_HPP_NAMESPACE::StencilOp passOp, 8240 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 8241 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 8242 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8243 { 8244 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8245 d.vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ), 8246 static_cast<VkStencilFaceFlags>( faceMask ), 8247 static_cast<VkStencilOp>( failOp ), 8248 static_cast<VkStencilOp>( passOp ), 8249 static_cast<VkStencilOp>( depthFailOp ), 8250 static_cast<VkCompareOp>( compareOp ) ); 8251 } 8252 8253 template <typename Dispatch> setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const8254 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 8255 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8256 { 8257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8258 d.vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); 8259 } 8260 8261 template <typename Dispatch> setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const8262 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8263 { 8264 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8265 d.vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); 8266 } 8267 8268 template <typename Dispatch> 
setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const8269 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 8270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8271 { 8272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8273 d.vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); 8274 } 8275 8276 template <typename Dispatch> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8277 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 8278 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 8279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8280 { 8281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8282 d.vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ), 8283 reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), 8284 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8285 } 8286 8287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8288 template <typename Dispatch> 8289 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8290 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8291 { 8292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8293 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8294 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8295 "Function <vkGetDeviceBufferMemoryRequirements> requires 
<VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8296 # endif 8297 8298 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8299 d.vkGetDeviceBufferMemoryRequirements( 8300 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8301 8302 return memoryRequirements; 8303 } 8304 8305 template <typename X, typename Y, typename... Z, typename Dispatch> 8306 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8307 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8308 { 8309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8310 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8311 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8312 "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8313 # endif 8314 8315 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8316 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8317 d.vkGetDeviceBufferMemoryRequirements( 8318 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8319 8320 return structureChain; 8321 } 8322 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8323 8324 template <typename Dispatch> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8325 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8326 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 8327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8328 { 8329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8330 d.vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ), 8331 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 8332 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8333 } 8334 8335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8336 template <typename Dispatch> 8337 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8338 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8339 { 8340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8341 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8342 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8343 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8344 # endif 8345 8346 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8347 d.vkGetDeviceImageMemoryRequirements( 8348 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8349 8350 return memoryRequirements; 8351 } 8352 8353 template <typename X, typename Y, typename... 
Z, typename Dispatch> 8354 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8355 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8356 { 8357 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8358 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8359 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8360 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8361 # endif 8362 8363 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8364 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8365 d.vkGetDeviceImageMemoryRequirements( 8366 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8367 8368 return structureChain; 8369 } 8370 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8371 8372 template <typename Dispatch> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const8373 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8374 uint32_t * pSparseMemoryRequirementCount, 8375 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 8376 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8377 { 8378 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8379 d.vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), 8380 
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 8381 pSparseMemoryRequirementCount, 8382 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 8383 } 8384 8385 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8386 template <typename SparseImageMemoryRequirements2Allocator, 8387 typename Dispatch, 8388 typename std::enable_if< 8389 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 8390 int>::type> 8391 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8392 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 8393 { 8394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8395 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8396 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8397 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8398 # endif 8399 8400 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 8401 uint32_t sparseMemoryRequirementCount; 8402 d.vkGetDeviceImageSparseMemoryRequirements( 8403 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8404 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8405 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8406 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8407 &sparseMemoryRequirementCount, 8408 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8409 8410 VULKAN_HPP_ASSERT( 
sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8411 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8412 { 8413 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8414 } 8415 return sparseMemoryRequirements; 8416 } 8417 8418 template <typename SparseImageMemoryRequirements2Allocator, 8419 typename Dispatch, 8420 typename std::enable_if< 8421 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 8422 int>::type> 8423 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const8424 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 8425 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 8426 Dispatch const & d ) const 8427 { 8428 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8429 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8430 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8431 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8432 # endif 8433 8434 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 8435 sparseImageMemoryRequirements2Allocator ); 8436 uint32_t sparseMemoryRequirementCount; 8437 d.vkGetDeviceImageSparseMemoryRequirements( 8438 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8439 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8440 d.vkGetDeviceImageSparseMemoryRequirements( 
m_device, 8441 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8442 &sparseMemoryRequirementCount, 8443 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8444 8445 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8446 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8447 { 8448 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8449 } 8450 return sparseMemoryRequirements; 8451 } 8452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8453 8454 //=== VK_VERSION_1_4 === 8455 8456 template <typename Dispatch> setLineStipple(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const8457 VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8458 { 8459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8460 d.vkCmdSetLineStipple( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); 8461 } 8462 8463 template <typename Dispatch> mapMemory2(const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo,void ** ppData,Dispatch const & d) const8464 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, 8465 void ** ppData, 8466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8467 { 8468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8469 return static_cast<Result>( d.vkMapMemory2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); 8470 } 8471 8472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8473 template <typename Dispatch> mapMemory2(const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo,Dispatch const & d) const8474 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory2( const 
VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, 8475 Dispatch const & d ) const 8476 { 8477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8478 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8479 VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function <vkMapMemory2> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 8480 # endif 8481 8482 void * pData; 8483 VULKAN_HPP_NAMESPACE::Result result = 8484 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); 8485 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); 8486 8487 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 8488 } 8489 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8490 8491 template <typename Dispatch> unmapMemory2(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo,Dispatch const & d) const8492 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, 8493 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8494 { 8495 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8496 return static_cast<Result>( d.vkUnmapMemory2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); 8497 } 8498 8499 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8500 template <typename Dispatch> unmapMemory2(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo,Dispatch const & d) const8501 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, 8502 Dispatch const & d ) const 8503 { 8504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8505 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8506 VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function <vkUnmapMemory2> requires <VK_KHR_map_memory2> or 
<VK_VERSION_1_4>" ); 8507 # endif 8508 8509 VULKAN_HPP_NAMESPACE::Result result = 8510 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); 8511 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); 8512 8513 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 8514 } 8515 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8516 8517 template <typename Dispatch> bindIndexBuffer2(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const8518 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, 8519 VULKAN_HPP_NAMESPACE::DeviceSize offset, 8520 VULKAN_HPP_NAMESPACE::DeviceSize size, 8521 VULKAN_HPP_NAMESPACE::IndexType indexType, 8522 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8523 { 8524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8525 d.vkCmdBindIndexBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), 8526 static_cast<VkBuffer>( buffer ), 8527 static_cast<VkDeviceSize>( offset ), 8528 static_cast<VkDeviceSize>( size ), 8529 static_cast<VkIndexType>( indexType ) ); 8530 } 8531 8532 template <typename Dispatch> getRenderingAreaGranularity(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const8533 VULKAN_HPP_INLINE void Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, 8534 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 8535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8536 { 8537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8538 d.vkGetRenderingAreaGranularity( 8539 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), 
reinterpret_cast<VkExtent2D *>( pGranularity ) ); 8540 } 8541 8542 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8543 template <typename Dispatch> 8544 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderingAreaGranularity(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo,Dispatch const & d) const8545 Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8546 { 8547 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8548 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8549 VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function <vkGetRenderingAreaGranularity> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 8550 # endif 8551 8552 VULKAN_HPP_NAMESPACE::Extent2D granularity; 8553 d.vkGetRenderingAreaGranularity( 8554 m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 8555 8556 return granularity; 8557 } 8558 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8559 8560 template <typename Dispatch> getImageSubresourceLayout(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const8561 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, 8562 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 8563 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8564 { 8565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8566 d.vkGetDeviceImageSubresourceLayout( 8567 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 8568 } 8569 8570 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8571 template <typename Dispatch> 8572 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::SubresourceLayout2
  Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout &&
                     "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
#  endif

  // The C++ wrapper structs are layout-compatible with the C structs, so the address of the
  // local result is handed straight to the C entry point via reinterpret_cast.
  VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout;
  d.vkGetDeviceImageSubresourceLayout(
    m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

  return layout;
}

// Enhanced-mode variant returning the queried layout inside a caller-specified StructureChain,
// so extension structures chained behind SubresourceLayout2 get filled in as well.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout &&
                     "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  // Write directly into the SubresourceLayout2 element of the chain; its pNext already links the rest.
  VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>();
  d.vkGetDeviceImageSubresourceLayout(
    m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

  return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image                     image,
                                                           const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,
                                                           VULKAN_HPP_NAMESPACE::SubresourceLayout2 *      pLayout,
                                                           Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetImageSubresourceLayout2( static_cast<VkDevice>( m_device ),
                                  static_cast<VkImage>( image ),
                                  reinterpret_cast<const VkImageSubresource2 *>( pSubresource ),
                                  reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: takes the subresource by reference and returns the layout by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2(
  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT(
    d.vkGetImageSubresourceLayout2 &&
    "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout;
  d.vkGetImageSubresourceLayout2( m_device,
                                  static_cast<VkImage>( image ),
                                  reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
                                  reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

  return layout;
}

// StructureChain variant: also fills any extension structures chained behind SubresourceLayout2.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2(
  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT(
    d.vkGetImageSubresourceLayout2 &&
    "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>();
  d.vkGetImageSubresourceLayout2( m_device,
                                  static_cast<VkImage>( image ),
                                  reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
                                  reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );

  return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                         VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                                         uint32_t                                set,
uint32_t                                         descriptorWriteCount,
                                                         const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushDescriptorSet( static_cast<VkCommandBuffer>( m_commandBuffer ),
                            static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                            static_cast<VkPipelineLayout>( layout ),
                            set,
                            descriptorWriteCount,
                            reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: takes the descriptor writes as an ArrayProxy instead of count + pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                    VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                    uint32_t                                set,
                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function <vkCmdPushDescriptorSet> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdPushDescriptorSet( m_commandBuffer,
                            static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                            static_cast<VkPipelineLayout>( layout ),
                            set,
                            descriptorWrites.size(),
                            reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant; pData is an opaque blob whose layout is described by the
// descriptor update template.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                     VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
                                                                     uint32_t                                       set,
                                                                     const void *                                   pData,
                                                                     Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushDescriptorSetWithTemplate( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                        static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                        static_cast<VkPipelineLayout>( layout ),
                                        set,
                                        pData );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: passes the address of a caller-provided value as the template data.
template <typename DataType, typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                     VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
                                                                     uint32_t                                       set,
                                                                     DataType const &                               data,
                                                                     Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT(
    d.vkCmdPushDescriptorSetWithTemplate &&
    "Function <vkCmdPushDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer,
                                        static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                        static_cast<VkPipelineLayout>( layout ),
                                        set,
                                        reinterpret_cast<const void *>( &data ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetRenderingAttachmentLocations( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                          reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the location info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations &&
                     "Function <vkCmdSetRenderingAttachmentLocations> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo *
pInputAttachmentIndexInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetRenderingInputAttachmentIndices( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                             reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the index info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices &&
                     "Function <vkCmdSetRenderingInputAttachmentIndices> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBindDescriptorSets2( static_cast<VkCommandBuffer>( m_commandBuffer ),
                              reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the bind info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function <vkCmdBindDescriptorSets2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushConstants2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the push-constants info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function <vkCmdPushConstants2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushDescriptorSet2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the push info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function <vkCmdPushDescriptorSet2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

template <typename Dispatch>
VULKAN_HPP_INLINE void
CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushDescriptorSetWithTemplate2( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                         reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant taking the template info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 &&
                     "Function <vkCmdPushDescriptorSetWithTemplate2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" );
#  endif

  d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer,
                                         reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant; returns the raw Result without translating it to an exception.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkCopyMemoryToImage( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: checks the result via detail::resultCheck (throws in exception builds).
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function <vkCopyMemoryToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkCopyMemoryToImage( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( &copyMemoryToImageInfo ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant; returns the raw Result without translating it to an exception.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkCopyImageToMemory( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: checks the result via detail::resultCheck (throws in exception builds).
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function <vkCopyImageToMemory> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkCopyImageToMemory( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( &copyImageToMemoryInfo ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant; returns the raw Result without translating it to an exception.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkCopyImageToImage( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function <vkCopyImageToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::Result result =
    static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyImageToImage( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( &copyImageToImageInfo ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw count + pointer variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount,
                                                                             const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkTransitionImageLayout(
    static_cast<VkDevice>( m_device ), transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: takes the transitions as an ArrayProxy and checks the result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions,
                                 Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function <vkTransitionImageLayout> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" );
#  endif

  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

//=== VK_KHR_surface ===

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySurfaceKHR(
    static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: allocator callbacks wrapped in Optional (may be empty).
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" );
#  endif

  d.vkDestroySurfaceKHR( m_instance,
                         static_cast<VkSurfaceKHR>( surface ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// destroy() overload on the handle type; forwards to vkDestroySurfaceKHR.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySurfaceKHR(
    static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function
<vkDestroySurfaceKHR> requires <VK_KHR_surface>" );
#  endif

  d.vkDestroySurfaceKHR( m_instance,
                         static_cast<VkSurfaceKHR>( surface ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                    VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
    static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the Bool32 by value after result checking.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
  PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" );
#  endif

  VULKAN_HPP_NAMESPACE::Bool32 supported;
  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
    m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw pointer-parameter variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                         VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                           static_cast<VkSurfaceKHR>( surface ),
                                                                           reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the capabilities struct by value after result checking.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
  PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" );
#  endif

  VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
    m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw count + pointer variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                    uint32_t * pSurfaceFormatCount,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                      static_cast<VkSurfaceKHR>( surface ),
                                                                      pSurfaceFormatCount,
                                                                      reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: enumerates all surface formats into a std::vector.
template <typename SurfaceFormatKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
#  endif

  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
  uint32_t surfaceFormatCount;
  VULKAN_HPP_NAMESPACE::Result result;
  // Standard Vulkan two-call enumeration: query the count, size the vector, then fetch the
  // data; loop again on eIncomplete in case the count changed between the two calls.
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
    {
      surfaceFormats.resize( surfaceFormatCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Shrink in case the implementation returned fewer elements than it first reported.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
}

// Overload taking a caller-provided allocator for the result vector.
template <typename SurfaceFormatKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
  PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                        SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
                                        Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
#  endif

  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
  uint32_t surfaceFormatCount;
  VULKAN_HPP_NAMESPACE::Result result;
  // Same two-call enumeration loop as the default-allocator overload above.
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
    {
      surfaceFormats.resize( surfaceFormatCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Raw count + pointer variant, mirroring the C API one-to-one.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                         uint32_t * pPresentModeCount,
                                                                                         VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                           static_cast<VkSurfaceKHR>( surface ),
                                                                           pPresentModeCount,
                                                                           reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: enumerates the present modes into a std::vector.
template <typename PresentModeKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
  PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" );
#  endif

  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
  uint32_t presentModeCount;
VULKAN_HPP_NAMESPACE::Result result; 9224 do 9225 { 9226 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9227 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 9228 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 9229 { 9230 presentModes.resize( presentModeCount ); 9231 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 9232 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 9233 } 9234 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9235 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 9236 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 9237 if ( presentModeCount < presentModes.size() ) 9238 { 9239 presentModes.resize( presentModeCount ); 9240 } 9241 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 9242 } 9243 9244 template <typename PresentModeKHRAllocator, 9245 typename Dispatch, 9246 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 9247 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const9248 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9249 PresentModeKHRAllocator & presentModeKHRAllocator, 9250 Dispatch const & d ) const 9251 { 9252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9254 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); 9255 # endif 9256 9257 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 9258 uint32_t presentModeCount; 9259 VULKAN_HPP_NAMESPACE::Result result; 9260 do 9261 { 9262 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9263 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 9264 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 9265 { 9266 presentModes.resize( presentModeCount ); 9267 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 9268 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 9269 } 9270 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9271 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 9272 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 9273 if ( presentModeCount < presentModes.size() ) 9274 { 9275 presentModes.resize( presentModeCount ); 9276 } 9277 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 9278 } 9279 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9280 9281 //=== VK_KHR_swapchain === 9282 9283 template <typename Dispatch> createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,Dispatch const & d) const9284 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, 9285 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9286 
VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, 9287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9288 { 9289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9290 return static_cast<Result>( d.vkCreateSwapchainKHR( static_cast<VkDevice>( m_device ), 9291 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), 9292 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9293 reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); 9294 } 9295 9296 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9297 template <typename Dispatch> 9298 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9299 Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 9300 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9301 Dispatch const & d ) const 9302 { 9303 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9304 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9305 VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 9306 # endif 9307 9308 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 9309 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9310 d.vkCreateSwapchainKHR( m_device, 9311 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 9312 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9313 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 9314 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); 9315 9316 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); 9317 } 9318 9319 # 
ifndef VULKAN_HPP_NO_SMART_HANDLE 9320 template <typename Dispatch> 9321 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9322 Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 9323 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9324 Dispatch const & d ) const 9325 { 9326 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9327 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9328 VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 9329 # endif 9330 9331 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 9332 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9333 d.vkCreateSwapchainKHR( m_device, 9334 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 9335 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9336 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 9337 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); 9338 9339 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 9340 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 9341 } 9342 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9343 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9344 9345 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9346 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9347 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9349 { 9350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9351 d.vkDestroySwapchainKHR( 9352 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9353 } 9354 9355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9356 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9357 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9358 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9360 { 9361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9362 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9363 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 9364 # endif 9365 9366 d.vkDestroySwapchainKHR( m_device, 9367 static_cast<VkSwapchainKHR>( swapchain ), 9368 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9369 } 9370 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9371 9372 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9373 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9374 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9375 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9376 { 9377 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9378 d.vkDestroySwapchainKHR( 9379 static_cast<VkDevice>( m_device ), 
static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9380 } 9381 9382 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9383 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9384 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9385 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9386 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9387 { 9388 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9389 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9390 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 9391 # endif 9392 9393 d.vkDestroySwapchainKHR( m_device, 9394 static_cast<VkSwapchainKHR>( swapchain ), 9395 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9396 } 9397 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9398 9399 template <typename Dispatch> getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,Dispatch const & d) const9400 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9401 uint32_t * pSwapchainImageCount, 9402 VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, 9403 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9404 { 9405 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9406 return static_cast<Result>( d.vkGetSwapchainImagesKHR( 9407 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); 9408 } 9409 9410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9411 template <typename ImageAllocator, 9412 typename 
Dispatch, 9413 typename std::enable_if<std::is_same<typename ImageAllocator::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type> 9414 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const9415 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 9416 { 9417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9418 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9419 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 9420 # endif 9421 9422 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages; 9423 uint32_t swapchainImageCount; 9424 VULKAN_HPP_NAMESPACE::Result result; 9425 do 9426 { 9427 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9428 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 9429 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 9430 { 9431 swapchainImages.resize( swapchainImageCount ); 9432 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 9433 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 9434 } 9435 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9436 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 9437 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 9438 if ( swapchainImageCount < swapchainImages.size() ) 9439 { 9440 swapchainImages.resize( swapchainImageCount ); 9441 } 9442 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 9443 } 9444 9445 template <typename 
ImageAllocator, 9446 typename Dispatch, 9447 typename std::enable_if<std::is_same<typename ImageAllocator::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type> 9448 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,ImageAllocator & imageAllocator,Dispatch const & d) const9449 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const 9450 { 9451 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9452 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9453 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 9454 # endif 9455 9456 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator ); 9457 uint32_t swapchainImageCount; 9458 VULKAN_HPP_NAMESPACE::Result result; 9459 do 9460 { 9461 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9462 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 9463 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 9464 { 9465 swapchainImages.resize( swapchainImageCount ); 9466 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 9467 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 9468 } 9469 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9470 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 9471 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 9472 if ( swapchainImageCount < swapchainImages.size() ) 9473 { 9474 swapchainImages.resize( swapchainImageCount ); 9475 } 9476 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 9477 } 9478 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9479 9480 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,uint32_t * pImageIndex,Dispatch const & d) const9481 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9482 uint64_t timeout, 9483 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 9484 VULKAN_HPP_NAMESPACE::Fence fence, 9485 uint32_t * pImageIndex, 9486 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9487 { 9488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9489 return static_cast<Result>( d.vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ), 9490 static_cast<VkSwapchainKHR>( swapchain ), 9491 timeout, 9492 static_cast<VkSemaphore>( semaphore ), 9493 static_cast<VkFence>( fence ), 9494 pImageIndex ) ); 9495 } 9496 9497 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9498 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const9499 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 9500 uint64_t timeout, 9501 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 9502 VULKAN_HPP_NAMESPACE::Fence fence, 9503 Dispatch const & d ) const 9504 { 9505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9506 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9507 VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" ); 9508 # endif 9509 9510 uint32_t imageIndex; 9511 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkAcquireNextImageKHR( 9512 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) ); 9513 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 9514 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", 9515 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 9516 VULKAN_HPP_NAMESPACE::Result::eTimeout, 9517 VULKAN_HPP_NAMESPACE::Result::eNotReady, 9518 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 9519 9520 return ResultValue<uint32_t>( result, std::move( imageIndex ) ); 9521 } 9522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9523 9524 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,Dispatch const & d) const9525 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, 9526 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9527 { 9528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9529 return static_cast<Result>( d.vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); 9530 } 9531 9532 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9533 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,Dispatch const & d) const9534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, 9535 Dispatch const & d ) const 9536 { 9537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9539 VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" ); 9540 # endif 9541 9542 VULKAN_HPP_NAMESPACE::Result result = 9543 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) ); 9544 
VULKAN_HPP_NAMESPACE::detail::resultCheck( 9545 result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 9546 9547 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 9548 } 9549 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9550 9551 template <typename Dispatch> getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,Dispatch const & d) const9552 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( 9553 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9554 { 9555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9556 return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( 9557 static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); 9558 } 9559 9560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9561 template <typename Dispatch> 9562 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const & d) const9563 Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const 9564 { 9565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9566 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9567 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && 9568 "Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9569 # endif 9570 9571 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; 9572 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9573 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, 
reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) ); 9574 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); 9575 9576 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); 9577 } 9578 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9579 9580 template <typename Dispatch> getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const9581 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9582 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 9583 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9584 { 9585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9586 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 9587 static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 9588 } 9589 9590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9591 template <typename Dispatch> 9592 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const9593 Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 9594 { 9595 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9596 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9597 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && 9598 "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9599 # endif 9600 9601 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 9602 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 9603 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 9604 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); 9605 9606 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); 9607 } 9608 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9609 9610 template <typename Dispatch> getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const9611 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 9612 uint32_t * pRectCount, 9613 VULKAN_HPP_NAMESPACE::Rect2D * pRects, 9614 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9615 { 9616 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9617 return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 9618 static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); 9619 } 9620 9621 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9622 template <typename Rect2DAllocator, 9623 typename Dispatch, 9624 typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type> 9625 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const9626 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 9627 { 9628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9629 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9630 VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && 9631 "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 9632 # endif 9633 9634 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects; 9635 uint32_t rectCount; 9636 VULKAN_HPP_NAMESPACE::Result result; 9637 do 9638 { 9639 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9640 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 9641 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 9642 { 9643 rects.resize( rectCount ); 9644 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 9645 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 9646 } 9647 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9648 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 9649 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 9650 if ( rectCount < rects.size() ) 9651 { 9652 rects.resize( rectCount ); 9653 } 9654 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); 9655 } 9656 9657 template <typename Rect2DAllocator, 9658 typename Dispatch, 9659 typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type> 9660 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Rect2DAllocator & rect2DAllocator,Dispatch const & d) const9661 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const 9662 { 9663 
// NOTE(review): this header is generated from the Khronos Vulkan XML API Registry.
// Comments only were added below; no code tokens were changed.
//
// Tail of PhysicalDevice::getPresentRectanglesKHR (its signature lies before this chunk).
// Standard Vulkan two-call enumeration: query the element count, then fetch the data,
// repeating while the driver returns eIncomplete (the count can change between the calls).
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR &&
                     "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
  uint32_t                     rectCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    // First call: count only (data pointer is nullptr).
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount )
    {
      rects.resize( rectCount );
      // Second call: fetch the rectangles into the sized buffer.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  if ( rectCount < rects.size() )
  {
    // Shrink if the final count came back smaller than the allocated buffer.
    rects.resize( rectCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkAcquireNextImage2KHR wrapper: forwards to the dispatcher and returns the
// raw Result unchecked; never throws (VULKAN_HPP_NOEXCEPT).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
                                                                            uint32_t *                                           pImageIndex,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns ResultValue<uint32_t> (the acquired image index).
// eTimeout / eNotReady / eSuboptimalKHR are passed to resultCheck as acceptable codes,
// so callers must still inspect the returned Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
                                                                                           Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
# endif

  uint32_t                     imageIndex;
  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                             VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
                                             { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                               VULKAN_HPP_NAMESPACE::Result::eTimeout,
                                               VULKAN_HPP_NAMESPACE::Result::eNotReady,
                                               VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

  return ResultValue<uint32_t>( result, std::move( imageIndex ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

//=== VK_KHR_display ===

// Pointer-based vkGetPhysicalDeviceDisplayPropertiesKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t *                                   pPropertyCount,
                                                                                       VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
    static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: two-call enumeration into a std::vector (default-constructed allocator).
template <
  typename DisplayPropertiesKHRAllocator,
  typename Dispatch,
  typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}

// Overload taking a caller-supplied allocator for the result vector; otherwise identical.
template <
  typename DisplayPropertiesKHRAllocator,
  typename Dispatch,
  typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkGetPhysicalDeviceDisplayPlanePropertiesKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t *                                        pPropertyCount,
                                                                                            VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
                                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
    static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: two-call enumeration of display-plane properties.
template <
  typename DisplayPlanePropertiesKHRAllocator,
  typename Dispatch,
  typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
                          int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}

// Overload taking a caller-supplied allocator for the result vector; otherwise identical.
template <
  typename DisplayPlanePropertiesKHRAllocator,
  typename Dispatch,
  typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
                          int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkGetDisplayPlaneSupportedDisplaysKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t                           planeIndex,
                                                       uint32_t *                         pDisplayCount,
                                                       VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
    static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: two-call enumeration of the displays supported by the given plane.
template <typename DisplayKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
  uint32_t                     displayCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
    {
      displays.resize( displayCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
  if ( displayCount < displays.size() )
  {
    displays.resize( displayCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) );
}

// Overload taking a caller-supplied allocator for the result vector; otherwise identical.
template <typename DisplayKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
  uint32_t                     displayCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
    {
      displays.resize( displayCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
  if ( displayCount < displays.size() )
  {
    displays.resize( displayCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkGetDisplayModePropertiesKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                 display,
                                                                                           uint32_t *                                       pPropertyCount,
                                                                                           VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                               static_cast<VkDisplayKHR>( display ),
                                                               pPropertyCount,
                                                               reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: two-call enumeration of the given display's mode properties.
template <typename DisplayModePropertiesKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
                                  int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}

// Overload taking a caller-supplied allocator for the result vector; otherwise identical.
template <typename DisplayModePropertiesKHRAllocator,
          typename Dispatch,
          typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
                                  int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                               DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
                                               Dispatch const &                    d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
# endif

  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
  uint32_t                     propertyCount;
  VULKAN_HPP_NAMESPACE::Result result;
  do
  {
    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
    }
  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkCreateDisplayModeKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                       display,
                                                                                    const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::DisplayModeKHR *                 pMode,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                        static_cast<VkDisplayKHR>( display ),
                                                        reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the created DisplayModeKHR handle after result checking.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
  PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                          display,
                                        const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR &    createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" );
# endif

  VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkCreateDisplayModeKHR( m_physicalDevice,
                              static_cast<VkDisplayKHR>( display ),
                              reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) );
}

# ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: wraps the created mode in a UniqueHandle with an ObjectDestroy
// deleter that captures this PhysicalDevice, the allocator and the dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
  PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR                          display,
                                              const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR &    createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                              Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
  VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" );
#  endif

  VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
    d.vkCreateDisplayModeKHR( m_physicalDevice,
                              static_cast<VkDisplayKHR>( display ),
                              reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
  VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );

  return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
    result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, detail::ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Pointer-based vkGetDisplayPlaneCapabilitiesKHR wrapper; raw Result, unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR                mode,
                                                  uint32_t                                            planeIndex,
                                                  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                  static_cast<VkDisplayModeKHR>( mode ),
                                                                  planeIndex,
                                                                  reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // NOTE: this file is generated from the Khronos Vulkan XML API Registry (see the file
  // header); fixes belong in the generator, not in hand edits here.
  //
  // Enhanced-mode variant (this point is inside "#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE",
  // opened just above): returns the capabilities by value and routes the Result through
  // detail::resultCheck instead of handing a raw Result back to the caller.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the entry point may be a null pointer if the
    // extension was never loaded; catch that in debug builds before calling through it.
    VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" );
#  endif

    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
    VULKAN_HPP_NAMESPACE::Result                      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
      m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw C-style variant: forwards the pointer parameters to the Vulkan entry point
  // unchanged and returns the Result verbatim; the caller owns all error handling.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
                                                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                        VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( m_instance ),
                                                                  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: takes the create-info by reference plus an optional allocator
  // and returns the created SurfaceKHR by value after routing the Result through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same call as above, but the created surface is wrapped in a
  // UniqueHandle whose ObjectDestroy deleter captures this Instance, the allocator and
  // the dispatcher so the surface is destroyed automatically.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
#    endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_display_swapchain ===

  // Raw C-style variant: creates swapchainCount shared swapchains in one call; the
  // caller supplies the output array and consumes the Result directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t                                             swapchainCount,
                                                                                   const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchains,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( m_device ),
                                                               swapchainCount,
                                                               reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
                                                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                               reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: one swapchain per entry of createInfos, returned as a
  // std::vector sized from the ArrayProxy. The enable_if in the template head restricts
  // SwapchainKHRAllocator to allocators whose value_type is SwapchainKHR.
  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                       Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                           result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) );
  }

  // Overload additionally taking an explicit vector allocator instance used to
  // construct the returned vector.
  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                       SwapchainKHRAllocator &                                                                      swapchainKHRAllocator,
                                       Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
    VULKAN_HPP_NAMESPACE::Result                                           result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) );
  }

  // Convenience single-swapchain form: calls vkCreateSharedSwapchainsKHR with a count
  // of 1 and returns the lone handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant of the vector overload: creates the raw handles into a plain
  // vector first, then wraps each one in a UniqueHandle sharing a single ObjectDestroy
  // deleter built from this Device, the allocator and the dispatcher.
  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                             Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#    endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
    uniqueSwapchains.reserve( createInfos.size() );
    detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) );
  }

  // Overload additionally taking an explicit allocator instance for the vector of
  // UniqueHandles.
  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                             SwapchainKHRAllocator &                                                                      swapchainKHRAllocator,
                                             Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#    endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
    uniqueSwapchains.reserve( createInfos.size() );
    detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) );
  }

  // Single-swapchain smart-handle form: count of 1, result wrapped in a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#    endif

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  //=== VK_KHR_xlib_surface ===

  // Raw C-style variant for Xlib surfaces; compiled only when the Xlib WSI platform
  // is enabled.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                     pSurface,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( static_cast<VkInstance>( m_instance ),
                                                          reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the created SurfaceKHR by value; Result is routed
  // through resultCheck. (The smart-handle companion follows this function.)
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR &    createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" );
#    endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }
10448 10449 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10450 template <typename Dispatch> 10451 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10452 Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, 10453 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10454 Dispatch const & d ) const 10455 { 10456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10457 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10458 VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" ); 10459 # endif 10460 10461 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10462 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10463 d.vkCreateXlibSurfaceKHR( m_instance, 10464 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 10465 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10466 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10467 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); 10468 10469 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10470 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10471 } 10472 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10473 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10474 10475 template <typename Dispatch> 10476 VULKAN_HPP_INLINE Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display * dpy,VisualID visualID,Dispatch const & d) const10477 
PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10478 { 10479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10480 return static_cast<Bool32>( 10481 d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); 10482 } 10483 10484 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10485 template <typename Dispatch> 10486 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display & dpy,VisualID visualID,Dispatch const & d) const10487 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10488 { 10489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10490 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10491 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR && 10492 "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" ); 10493 # endif 10494 10495 VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); 10496 10497 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 10498 } 10499 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10500 #endif /*VK_USE_PLATFORM_XLIB_KHR*/ 10501 10502 #if defined( VK_USE_PLATFORM_XCB_KHR ) 10503 //=== VK_KHR_xcb_surface === 10504 10505 template <typename Dispatch> createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10506 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, 10507 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10508 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10509 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10510 { 10511 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10512 return static_cast<Result>( d.vkCreateXcbSurfaceKHR( static_cast<VkInstance>( m_instance ), 10513 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), 10514 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10515 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10516 } 10517 10518 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10519 template <typename Dispatch> 10520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10521 Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 10522 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10523 Dispatch const & d ) const 10524 { 10525 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10526 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10527 VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); 10528 # endif 10529 10530 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10531 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10532 d.vkCreateXcbSurfaceKHR( m_instance, 10533 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 10534 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10535 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10536 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); 10537 10538 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10539 } 10540 10541 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10542 template <typename Dispatch> 10543 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10544 Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 10545 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10546 Dispatch const & d ) const 10547 { 10548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10550 VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); 10551 # endif 10552 10553 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10554 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10555 d.vkCreateXcbSurfaceKHR( m_instance, 10556 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 10557 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10558 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10559 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); 10560 10561 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10562 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10563 } 10564 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10565 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10566 10567 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t 
* connection,xcb_visualid_t visual_id,Dispatch const & d) const10568 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 10569 xcb_connection_t * connection, 10570 xcb_visualid_t visual_id, 10571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10572 { 10573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10574 return static_cast<Bool32>( 10575 d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); 10576 } 10577 10578 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10579 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t & connection,xcb_visualid_t visual_id,Dispatch const & d) const10580 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 10581 xcb_connection_t & connection, 10582 xcb_visualid_t visual_id, 10583 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10584 { 10585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10586 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10587 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR && 10588 "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" ); 10589 # endif 10590 10591 VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); 10592 10593 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 10594 } 10595 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10596 #endif /*VK_USE_PLATFORM_XCB_KHR*/ 10597 10598 #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) 10599 //=== VK_KHR_wayland_surface === 10600 10601 template <typename Dispatch> createWaylandSurfaceKHR(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * 
pSurface,Dispatch const & d) const10602 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, 10603 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10604 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10605 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10606 { 10607 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10608 return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( m_instance ), 10609 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), 10610 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10611 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10612 } 10613 10614 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10615 template <typename Dispatch> 10616 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10617 Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, 10618 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10619 Dispatch const & d ) const 10620 { 10621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10622 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10623 VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" ); 10624 # endif 10625 10626 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10627 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR( 10628 m_instance, 10629 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 10630 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10631 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10632 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); 10633 10634 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10635 } 10636 10637 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10638 template <typename Dispatch> 10639 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10640 Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, 10641 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10642 Dispatch const & d ) const 10643 { 10644 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10645 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10646 VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" ); 10647 # endif 10648 10649 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10650 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR( 10651 m_instance, 10652 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 10653 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10654 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10655 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); 10656 10657 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10658 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, 
Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10659 } 10660 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10661 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10662 10663 template <typename Dispatch> getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display * display,Dispatch const & d) const10664 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, 10665 struct wl_display * display, 10666 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10667 { 10668 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10669 return static_cast<Bool32>( 10670 d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, display ) ); 10671 } 10672 10673 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10674 template <typename Dispatch> 10675 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display & display,Dispatch const & d) const10676 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10677 { 10678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10680 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR && 10681 "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" ); 10682 # endif 10683 10684 VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); 10685 10686 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 10687 } 10688 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10689 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ 10690 10691 #if defined( VK_USE_PLATFORM_ANDROID_KHR ) 10692 //=== VK_KHR_android_surface === 10693 10694 template <typename Dispatch> 
createAndroidSurfaceKHR(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10695 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, 10696 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10697 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10698 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10699 { 10700 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10701 return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( m_instance ), 10702 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), 10703 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10704 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10705 } 10706 10707 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10708 template <typename Dispatch> 10709 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10710 Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, 10711 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10712 Dispatch const & d ) const 10713 { 10714 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10715 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10716 VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" ); 10717 # endif 10718 10719 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10720 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateAndroidSurfaceKHR( 10721 m_instance, 10722 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 10723 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10724 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10725 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); 10726 10727 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10728 } 10729 10730 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10731 template <typename Dispatch> 10732 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10733 Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, 10734 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10735 Dispatch const & d ) const 10736 { 10737 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10738 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10739 VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" ); 10740 # endif 10741 10742 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10743 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR( 10744 m_instance, 10745 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 10746 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10747 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10748 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); 10749 10750 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10751 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10752 } 10753 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10754 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10755 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 10756 10757 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 10758 //=== VK_KHR_win32_surface === 10759 10760 template <typename Dispatch> createWin32SurfaceKHR(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10761 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, 10762 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10763 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10764 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10765 { 10766 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10767 return static_cast<Result>( d.vkCreateWin32SurfaceKHR( static_cast<VkInstance>( m_instance ), 10768 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), 10769 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10770 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10771 } 10772 10773 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10774 template <typename Dispatch> 10775 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10776 Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & 
createInfo, 10777 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10778 Dispatch const & d ) const 10779 { 10780 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10781 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10782 VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" ); 10783 # endif 10784 10785 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10786 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10787 d.vkCreateWin32SurfaceKHR( m_instance, 10788 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), 10789 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10790 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10791 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); 10792 10793 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 10794 } 10795 10796 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10797 template <typename Dispatch> 10798 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10799 Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, 10800 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10801 Dispatch const & d ) const 10802 { 10803 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10804 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10805 VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" ); 10806 # endif 10807 10808 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10809 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10810 d.vkCreateWin32SurfaceKHR( m_instance, 10811 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), 10812 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10813 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10814 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); 10815 10816 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10817 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10818 } 10819 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10820 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10821 10822 template <typename Dispatch> getWin32PresentationSupportKHR(uint32_t queueFamilyIndex,Dispatch const & d) const10823 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10824 { 10825 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10826 return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex ) ); 10827 } 10828 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 10829 10830 //=== VK_EXT_debug_report === 10831 10832 template <typename Dispatch> 10833 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,Dispatch const & d) const10834 Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, 10835 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10836 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, 10837 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10838 { 10839 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10840 return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ), 10841 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), 10842 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10843 reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); 10844 } 10845 10846 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10847 template <typename Dispatch> 10848 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10849 Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, 10850 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10851 Dispatch const & d ) const 10852 { 10853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10854 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10855 VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); 10856 # endif 10857 10858 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 10859 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT( 10860 m_instance, 10861 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 10862 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10863 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) 
) ); 10864 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); 10865 10866 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); 10867 } 10868 10869 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10870 template <typename Dispatch> 10871 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10872 Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, 10873 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10874 Dispatch const & d ) const 10875 { 10876 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10877 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10878 VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); 10879 # endif 10880 10881 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 10882 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT( 10883 m_instance, 10884 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 10885 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10886 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); 10887 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); 10888 10889 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10890 result, 10891 UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( 
callback, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 10892 } 10893 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10894 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10895 10896 template <typename Dispatch> destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10897 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 10898 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10899 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10900 { 10901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10902 d.vkDestroyDebugReportCallbackEXT( 10903 static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10904 } 10905 10906 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10907 template <typename Dispatch> destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10908 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 10909 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10910 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10911 { 10912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10913 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10914 VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); 10915 # endif 10916 10917 d.vkDestroyDebugReportCallbackEXT( 10918 m_instance, 10919 static_cast<VkDebugReportCallbackEXT>( callback ), 10920 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 
10921 } 10922 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10923 10924 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10925 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 10926 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10928 { 10929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10930 d.vkDestroyDebugReportCallbackEXT( 10931 static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10932 } 10933 10934 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10935 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10936 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 10937 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10938 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10939 { 10940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10942 VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); 10943 # endif 10944 10945 d.vkDestroyDebugReportCallbackEXT( 10946 m_instance, 10947 static_cast<VkDebugReportCallbackEXT>( callback ), 10948 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10949 } 10950 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10951 10952 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT 
flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,uint64_t object,size_t location,int32_t messageCode,const char * pLayerPrefix,const char * pMessage,Dispatch const & d) const10953 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 10954 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, 10955 uint64_t object, 10956 size_t location, 10957 int32_t messageCode, 10958 const char * pLayerPrefix, 10959 const char * pMessage, 10960 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10961 { 10962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10963 d.vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ), 10964 static_cast<VkDebugReportFlagsEXT>( flags ), 10965 static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), 10966 object, 10967 location, 10968 messageCode, 10969 pLayerPrefix, 10970 pMessage ); 10971 } 10972 10973 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10974 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,uint64_t object,size_t location,int32_t messageCode,const std::string & layerPrefix,const std::string & message,Dispatch const & d) const10975 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 10976 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, 10977 uint64_t object, 10978 size_t location, 10979 int32_t messageCode, 10980 const std::string & layerPrefix, 10981 const std::string & message, 10982 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10983 { 10984 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10985 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10986 VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" ); 10987 # endif 10988 10989 d.vkDebugReportMessageEXT( m_instance, 10990 
static_cast<VkDebugReportFlagsEXT>( flags ), 10991 static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), 10992 object, 10993 location, 10994 messageCode, 10995 layerPrefix.c_str(), 10996 message.c_str() ); 10997 } 10998 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10999 11000 //=== VK_EXT_debug_marker === 11001 11002 template <typename Dispatch> debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,Dispatch const & d) const11003 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, 11004 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11005 { 11006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11007 return static_cast<Result>( 11008 d.vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); 11009 } 11010 11011 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11012 template <typename Dispatch> 11013 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo,Dispatch const & d) const11014 Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 11015 { 11016 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11017 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11018 VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" ); 11019 # endif 11020 11021 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11022 d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) ); 11023 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::debugMarkerSetObjectTagEXT" ); 11024 11025 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11026 } 11027 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11028 11029 template <typename Dispatch> debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,Dispatch const & d) const11030 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, 11031 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11032 { 11033 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11034 return static_cast<Result>( 11035 d.vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); 11036 } 11037 11038 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11039 template <typename Dispatch> 11040 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo,Dispatch const & d) const11041 Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 11042 { 11043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11044 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11045 VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" ); 11046 # endif 11047 11048 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11049 d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) ); 11050 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); 11051 11052 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11053 } 
11054 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11055 11056 template <typename Dispatch> debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) const11057 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 11058 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11059 { 11060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11061 d.vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); 11062 } 11063 11064 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11065 template <typename Dispatch> debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const11066 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, 11067 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11068 { 11069 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11070 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11071 VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" ); 11072 # endif 11073 11074 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 11075 } 11076 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11077 11078 template <typename Dispatch> debugMarkerEndEXT(Dispatch const & d) const11079 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11080 { 11081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11082 d.vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 11083 } 11084 11085 template <typename Dispatch> debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) 
const11086 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 11087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11088 { 11089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11090 d.vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); 11091 } 11092 11093 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11094 template <typename Dispatch> debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const11095 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, 11096 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11097 { 11098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11099 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11100 VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" ); 11101 # endif 11102 11103 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 11104 } 11105 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11106 11107 //=== VK_KHR_video_queue === 11108 11109 template <typename Dispatch> getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,Dispatch const & d) const11110 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, 11111 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, 11112 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11113 { 11114 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11115 return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 
static_cast<VkPhysicalDevice>( m_physicalDevice ), 11116 reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), 11117 reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); 11118 } 11119 11120 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11121 template <typename Dispatch> 11122 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,Dispatch const & d) const11123 PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const 11124 { 11125 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11126 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11127 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" ); 11128 # endif 11129 11130 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; 11131 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 11132 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 11133 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); 11134 11135 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); 11136 } 11137 11138 template <typename X, typename Y, typename... 
Z, typename Dispatch> 11139 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,Dispatch const & d) const11140 PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const 11141 { 11142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11143 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11144 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" ); 11145 # endif 11146 11147 StructureChain<X, Y, Z...> structureChain; 11148 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>(); 11149 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 11150 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 11151 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); 11152 11153 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 11154 } 11155 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11156 11157 template <typename Dispatch> 11158 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,uint32_t * pVideoFormatPropertyCount,VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,Dispatch const & d) const11159 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, 11160 uint32_t * pVideoFormatPropertyCount, 
11161 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, 11162 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11163 { 11164 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11165 return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 11166 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), 11167 pVideoFormatPropertyCount, 11168 reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); 11169 } 11170 11171 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11172 template <typename VideoFormatPropertiesKHRAllocator, 11173 typename Dispatch, 11174 typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, 11175 int>::type> 11176 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11177 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,Dispatch const & d) const11178 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const 11179 { 11180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11182 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 11183 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 11184 # endif 11185 11186 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; 11187 uint32_t videoFormatPropertyCount; 11188 VULKAN_HPP_NAMESPACE::Result result; 11189 do 11190 { 11191 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 11192 
m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 11193 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 11194 { 11195 videoFormatProperties.resize( videoFormatPropertyCount ); 11196 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11197 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 11198 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 11199 &videoFormatPropertyCount, 11200 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 11201 } 11202 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11203 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 11204 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 11205 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 11206 { 11207 videoFormatProperties.resize( videoFormatPropertyCount ); 11208 } 11209 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); 11210 } 11211 11212 template <typename VideoFormatPropertiesKHRAllocator, 11213 typename Dispatch, 11214 typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, 11215 int>::type> 11216 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11217 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,Dispatch const & d) const11218 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 11219 
VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, 11220 Dispatch const & d ) const 11221 { 11222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11223 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11224 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 11225 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 11226 # endif 11227 11228 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator ); 11229 uint32_t videoFormatPropertyCount; 11230 VULKAN_HPP_NAMESPACE::Result result; 11231 do 11232 { 11233 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 11234 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 11235 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 11236 { 11237 videoFormatProperties.resize( videoFormatPropertyCount ); 11238 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11239 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 11240 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 11241 &videoFormatPropertyCount, 11242 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 11243 } 11244 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11245 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 11246 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 11247 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 11248 { 11249 videoFormatProperties.resize( videoFormatPropertyCount ); 11250 } 11251 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
videoFormatProperties ) ); 11252 } 11253 11254 template <typename StructureChain, 11255 typename StructureChainAllocator, 11256 typename Dispatch, 11257 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 11258 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,Dispatch const & d) const11259 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const 11260 { 11261 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11262 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11263 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 11264 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 11265 # endif 11266 11267 std::vector<StructureChain, StructureChainAllocator> structureChains; 11268 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties; 11269 uint32_t videoFormatPropertyCount; 11270 VULKAN_HPP_NAMESPACE::Result result; 11271 do 11272 { 11273 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 11274 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 11275 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 11276 { 11277 structureChains.resize( videoFormatPropertyCount ); 11278 videoFormatProperties.resize( videoFormatPropertyCount ); 11279 for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) 11280 { 11281 videoFormatProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>().pNext; 11282 } 11283 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 11284 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 11285 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 11286 &videoFormatPropertyCount, 11287 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 11288 } 11289 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11290 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 11291 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 11292 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 11293 { 11294 structureChains.resize( videoFormatPropertyCount ); 11295 } 11296 for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) 11297 { 11298 structureChains[i].template get<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>() = videoFormatProperties[i]; 11299 } 11300 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 11301 } 11302 11303 template <typename StructureChain, 11304 typename StructureChainAllocator, 11305 typename Dispatch, 11306 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 11307 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,StructureChainAllocator & structureChainAllocator,Dispatch const & d) const11308 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 11309 StructureChainAllocator & structureChainAllocator, 11310 Dispatch const & d ) const 11311 { 11312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11313 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11314 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && 11315 "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); 11316 # endif 11317 11318 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 11319 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties; 11320 uint32_t videoFormatPropertyCount; 11321 VULKAN_HPP_NAMESPACE::Result result; 11322 do 11323 { 11324 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 11325 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 11326 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 11327 { 11328 structureChains.resize( videoFormatPropertyCount ); 11329 videoFormatProperties.resize( videoFormatPropertyCount ); 11330 for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) 11331 { 11332 videoFormatProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>().pNext; 11333 } 11334 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11335 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 11336 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 11337 &videoFormatPropertyCount, 11338 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 11339 } 11340 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11341 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 11342 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 11343 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 11344 { 11345 structureChains.resize( videoFormatPropertyCount ); 11346 } 11347 for ( uint32_t i = 0; i < 
videoFormatPropertyCount; i++ ) 11348 { 11349 structureChains[i].template get<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>() = videoFormatProperties[i]; 11350 } 11351 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 11352 } 11353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11354 11355 template <typename Dispatch> createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,Dispatch const & d) const11356 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, 11357 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11358 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, 11359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11360 { 11361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11362 return static_cast<Result>( d.vkCreateVideoSessionKHR( static_cast<VkDevice>( m_device ), 11363 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), 11364 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11365 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); 11366 } 11367 11368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11369 template <typename Dispatch> 11370 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11371 Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 11372 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11373 Dispatch const & d ) const 11374 { 11375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 11376 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11377 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11378 # endif 11379 11380 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 11381 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11382 d.vkCreateVideoSessionKHR( m_device, 11383 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 11384 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11385 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 11386 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); 11387 11388 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); 11389 } 11390 11391 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11392 template <typename Dispatch> 11393 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11394 Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 11395 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11396 Dispatch const & d ) const 11397 { 11398 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11399 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11400 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11401 # endif 11402 11403 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 11404 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11405 
d.vkCreateVideoSessionKHR( m_device, 11406 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 11407 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11408 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 11409 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); 11410 11411 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11412 result, UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11413 } 11414 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11415 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11416 11417 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11418 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11419 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11420 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11421 { 11422 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11423 d.vkDestroyVideoSessionKHR( 11424 static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11425 } 11426 11427 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11428 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11429 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11430 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11431 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11432 { 11433 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11434 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11435 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11436 # endif 11437 11438 d.vkDestroyVideoSessionKHR( 11439 m_device, 11440 static_cast<VkVideoSessionKHR>( videoSession ), 11441 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11442 } 11443 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11444 11445 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11446 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11447 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11448 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11449 { 11450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11451 d.vkDestroyVideoSessionKHR( 11452 static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11453 } 11454 11455 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11456 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11457 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11458 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11459 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11460 { 11461 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11462 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11463 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 11464 # endif 11465 
11466 d.vkDestroyVideoSessionKHR( 11467 m_device, 11468 static_cast<VkVideoSessionKHR>( videoSession ), 11469 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11470 } 11471 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11472 11473 template <typename Dispatch> 11474 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t * pMemoryRequirementsCount,VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,Dispatch const & d) const11475 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11476 uint32_t * pMemoryRequirementsCount, 11477 VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, 11478 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11479 { 11480 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11481 return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), 11482 static_cast<VkVideoSessionKHR>( videoSession ), 11483 pMemoryRequirementsCount, 11484 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); 11485 } 11486 11487 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11488 template <typename VideoSessionMemoryRequirementsKHRAllocator, 11489 typename Dispatch, 11490 typename std::enable_if< 11491 std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, 11492 int>::type> 11493 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11494 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Dispatch const & d) const11495 Device::getVideoSessionMemoryRequirementsKHR( 
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const 11496 { 11497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11498 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11499 VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 11500 # endif 11501 11502 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; 11503 uint32_t memoryRequirementsCount; 11504 VULKAN_HPP_NAMESPACE::Result result; 11505 do 11506 { 11507 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11508 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 11509 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 11510 { 11511 memoryRequirements.resize( memoryRequirementsCount ); 11512 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11513 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 11514 static_cast<VkVideoSessionKHR>( videoSession ), 11515 &memoryRequirementsCount, 11516 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 11517 } 11518 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11519 11520 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 11521 if ( memoryRequirementsCount < memoryRequirements.size() ) 11522 { 11523 memoryRequirements.resize( memoryRequirementsCount ); 11524 } 11525 return memoryRequirements; 11526 } 11527 11528 template <typename VideoSessionMemoryRequirementsKHRAllocator, 11529 typename Dispatch, 11530 typename std::enable_if< 11531 std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, 11532 int>::type> 11533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11534 typename 
ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,Dispatch const & d) const11535 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11536 VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, 11537 Dispatch const & d ) const 11538 { 11539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11540 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11541 VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 11542 # endif 11543 11544 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( 11545 videoSessionMemoryRequirementsKHRAllocator ); 11546 uint32_t memoryRequirementsCount; 11547 VULKAN_HPP_NAMESPACE::Result result; 11548 do 11549 { 11550 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11551 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 11552 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 11553 { 11554 memoryRequirements.resize( memoryRequirementsCount ); 11555 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11556 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 11557 static_cast<VkVideoSessionKHR>( videoSession ), 11558 &memoryRequirementsCount, 11559 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 11560 } 11561 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11562 11563 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 
11564 if ( memoryRequirementsCount < memoryRequirements.size() ) 11565 { 11566 memoryRequirements.resize( memoryRequirementsCount ); 11567 } 11568 return memoryRequirements; 11569 } 11570 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11571 11572 template <typename Dispatch> 11573 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t bindSessionMemoryInfoCount,const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,Dispatch const & d) const11574 Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11575 uint32_t bindSessionMemoryInfoCount, 11576 const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, 11577 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11578 { 11579 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11580 return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ), 11581 static_cast<VkVideoSessionKHR>( videoSession ), 11582 bindSessionMemoryInfoCount, 11583 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); 11584 } 11585 11586 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11587 template <typename Dispatch> bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,Dispatch const & d) const11588 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( 11589 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 11590 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, 11591 Dispatch const & d ) const 11592 { 11593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11594 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) 11595 VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" ); 11596 # endif 11597 11598 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11599 d.vkBindVideoSessionMemoryKHR( m_device, 11600 static_cast<VkVideoSessionKHR>( videoSession ), 11601 bindSessionMemoryInfos.size(), 11602 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) ); 11603 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); 11604 11605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11606 } 11607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11608 11609 template <typename Dispatch> 11610 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,Dispatch const & d) const11611 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, 11612 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11613 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, 11614 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11615 { 11616 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11617 return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), 11618 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), 11619 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11620 reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); 11621 } 11622 11623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11624 template <typename Dispatch> 11625 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11626 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 11627 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11628 Dispatch const & d ) const 11629 { 11630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11631 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11632 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11633 # endif 11634 11635 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 11636 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 11637 m_device, 11638 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 11639 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11640 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 11641 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); 11642 11643 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); 11644 } 11645 11646 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11647 template <typename Dispatch> 11648 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11649 Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 11650 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11651 Dispatch const & d ) const 11652 { 11653 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11654 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11655 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11656 # endif 11657 11658 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 11659 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 11660 m_device, 11661 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 11662 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11663 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 11664 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); 11665 11666 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 11667 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( 11668 videoSessionParameters, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11669 } 11670 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11672 11673 template <typename Dispatch> 11674 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,Dispatch const & d) const11675 Device::updateVideoSessionParametersKHR( 
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11676 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, 11677 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11678 { 11679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11680 return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), 11681 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11682 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); 11683 } 11684 11685 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11686 template <typename Dispatch> 11687 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,Dispatch const & d) const11688 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11689 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, 11690 Dispatch const & d ) const 11691 { 11692 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11694 VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11695 # endif 11696 11697 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11698 d.vkUpdateVideoSessionParametersKHR( m_device, 11699 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11700 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) ); 11701 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); 11702 11703 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 11704 } 11705 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11706 11707 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11708 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11709 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11711 { 11712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11713 d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), 11714 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11715 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11716 } 11717 11718 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11719 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11720 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11721 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11722 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11723 { 11724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11725 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11726 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11727 # endif 11728 11729 d.vkDestroyVideoSessionParametersKHR( 11730 m_device, 11731 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11732 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11733 } 11734 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11735 11736 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11737 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11738 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11739 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11740 { 11741 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11742 d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), 11743 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11744 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11745 } 11746 11747 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11748 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11749 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 11750 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11751 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11752 { 11753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11755 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 11756 # endif 11757 11758 d.vkDestroyVideoSessionParametersKHR( 11759 m_device, 11760 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 11761 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11762 } 11763 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11764 11765 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,Dispatch const & d) const11766 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, 11767 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11768 { 11769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11770 d.vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); 11771 } 11772 11773 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11774 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,Dispatch const & d) const11775 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, 11776 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11777 { 11778 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11779 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11780 VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11781 # endif 11782 11783 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) ); 11784 } 11785 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11786 11787 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,Dispatch const & d) const11788 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, 11789 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11790 { 11791 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11792 d.vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEndCodingInfoKHR *>( 
pEndCodingInfo ) ); 11793 } 11794 11795 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11796 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,Dispatch const & d) const11797 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, 11798 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11799 { 11800 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11801 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11802 VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11803 # endif 11804 11805 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); 11806 } 11807 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11808 11809 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,Dispatch const & d) const11810 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, 11811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11812 { 11813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11814 d.vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 11815 reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); 11816 } 11817 11818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11819 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,Dispatch const & d) const11820 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, 11821 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11822 { 11823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11824 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11825 VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" ); 11826 # endif 11827 11828 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); 11829 } 11830 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11831 11832 //=== VK_KHR_video_decode_queue === 11833 11834 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,Dispatch const & d) const11835 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, 11836 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11837 { 11838 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11839 d.vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); 11840 } 11841 11842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11843 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,Dispatch const & d) const11844 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, 11845 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11846 { 11847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11848 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11849 VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" ); 11850 # endif 11851 11852 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); 11853 } 11854 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11855 11856 //=== VK_EXT_transform_feedback === 11857 11858 template <typename Dispatch> bindTransformFeedbackBuffersEXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * 
pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,Dispatch const & d) const11859 VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 11860 uint32_t bindingCount, 11861 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 11862 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 11863 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 11864 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11865 { 11866 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11867 d.vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 11868 firstBinding, 11869 bindingCount, 11870 reinterpret_cast<const VkBuffer *>( pBuffers ), 11871 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 11872 reinterpret_cast<const VkDeviceSize *>( pSizes ) ); 11873 } 11874 11875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11876 template <typename Dispatch> 11877 VULKAN_HPP_INLINE void bindTransformFeedbackBuffersEXT(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,Dispatch const & d) const11878 CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 11879 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 11880 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 11881 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 11882 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11883 { 11884 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11885 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11886 VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function 
<vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" ); 11887 # endif 11888 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11889 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 11890 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 11891 # else 11892 if ( buffers.size() != offsets.size() ) 11893 { 11894 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); 11895 } 11896 if ( !sizes.empty() && buffers.size() != sizes.size() ) 11897 { 11898 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); 11899 } 11900 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11901 11902 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 11903 firstBinding, 11904 buffers.size(), 11905 reinterpret_cast<const VkBuffer *>( buffers.data() ), 11906 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 11907 reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); 11908 } 11909 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11910 11911 template <typename Dispatch> beginTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11912 VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11913 uint32_t counterBufferCount, 11914 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11915 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11916 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11917 { 11918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11919 d.vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 11920 firstCounterBuffer, 11921 counterBufferCount, 11922 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11923 reinterpret_cast<const 
VkDeviceSize *>( pCounterBufferOffsets ) ); 11924 } 11925 11926 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11927 template <typename Dispatch> 11928 VULKAN_HPP_INLINE void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11929 CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11930 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11931 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11932 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11933 { 11934 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11935 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11936 VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11937 # endif 11938 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11939 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 11940 # else 11941 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11942 { 11943 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11944 } 11945 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11946 11947 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 11948 firstCounterBuffer, 11949 counterBuffers.size(), 11950 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11951 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11952 } 11953 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11954 11955 template <typename Dispatch> endTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t 
counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11956 VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11957 uint32_t counterBufferCount, 11958 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11959 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11960 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11961 { 11962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11963 d.vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 11964 firstCounterBuffer, 11965 counterBufferCount, 11966 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11967 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 11968 } 11969 11970 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11971 template <typename Dispatch> 11972 VULKAN_HPP_INLINE void endTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11973 CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11974 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11975 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11977 { 11978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11979 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11980 VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11981 # endif 11982 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11983 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == 
counterBufferOffsets.size() ); 11984 # else 11985 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11986 { 11987 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11988 } 11989 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11990 11991 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 11992 firstCounterBuffer, 11993 counterBuffers.size(), 11994 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11995 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11996 } 11997 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11998 11999 template <typename Dispatch> beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,uint32_t index,Dispatch const & d) const12000 VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 12001 uint32_t query, 12002 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 12003 uint32_t index, 12004 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12005 { 12006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12007 d.vkCmdBeginQueryIndexedEXT( 12008 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); 12009 } 12010 12011 template <typename Dispatch> 12012 VULKAN_HPP_INLINE void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,uint32_t index,Dispatch const & d) const12013 CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12014 { 12015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12016 d.vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index ); 12017 } 12018 
12019 template <typename Dispatch> drawIndirectByteCountEXT(uint32_t instanceCount,uint32_t firstInstance,VULKAN_HPP_NAMESPACE::Buffer counterBuffer,VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,uint32_t counterOffset,uint32_t vertexStride,Dispatch const & d) const12020 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, 12021 uint32_t firstInstance, 12022 VULKAN_HPP_NAMESPACE::Buffer counterBuffer, 12023 VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, 12024 uint32_t counterOffset, 12025 uint32_t vertexStride, 12026 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12027 { 12028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12029 d.vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 12030 instanceCount, 12031 firstInstance, 12032 static_cast<VkBuffer>( counterBuffer ), 12033 static_cast<VkDeviceSize>( counterBufferOffset ), 12034 counterOffset, 12035 vertexStride ); 12036 } 12037 12038 //=== VK_NVX_binary_import === 12039 12040 template <typename Dispatch> createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,Dispatch const & d) const12041 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, 12042 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12043 VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, 12044 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12045 { 12046 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12047 return static_cast<Result>( d.vkCreateCuModuleNVX( static_cast<VkDevice>( m_device ), 12048 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), 12049 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12050 reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); 12051 } 12052 12053 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 12054 template <typename Dispatch> 12055 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12056 Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 12057 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12058 Dispatch const & d ) const 12059 { 12060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12061 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12062 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 12063 # endif 12064 12065 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 12066 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12067 d.vkCreateCuModuleNVX( m_device, 12068 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 12069 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12070 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 12071 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); 12072 12073 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); 12074 } 12075 12076 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12077 template <typename Dispatch> 12078 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12079 Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 
12080 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12081 Dispatch const & d ) const 12082 { 12083 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12084 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12085 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 12086 # endif 12087 12088 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 12089 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12090 d.vkCreateCuModuleNVX( m_device, 12091 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 12092 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12093 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 12094 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); 12095 12096 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12097 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12098 } 12099 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12101 12102 template <typename Dispatch> createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,Dispatch const & d) const12103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, 12104 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12105 VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, 12106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12107 { 12108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12109 return static_cast<Result>( d.vkCreateCuFunctionNVX( 
static_cast<VkDevice>( m_device ), 12110 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), 12111 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12112 reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); 12113 } 12114 12115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12116 template <typename Dispatch> 12117 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12118 Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 12119 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12120 Dispatch const & d ) const 12121 { 12122 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12123 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12124 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 12125 # endif 12126 12127 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 12128 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12129 d.vkCreateCuFunctionNVX( m_device, 12130 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 12131 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12132 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 12133 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); 12134 12135 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); 12136 } 12137 12138 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12139 template <typename Dispatch> 12140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, 
Dispatch>>::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12141 Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 12142 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12143 Dispatch const & d ) const 12144 { 12145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12146 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12147 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 12148 # endif 12149 12150 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 12151 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12152 d.vkCreateCuFunctionNVX( m_device, 12153 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 12154 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12155 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 12156 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); 12157 12158 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12159 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12160 } 12161 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12162 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12163 12164 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12165 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 12166 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12167 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 12168 { 12169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12170 d.vkDestroyCuModuleNVX( 12171 static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12172 } 12173 12174 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12175 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12176 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 12177 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12178 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12179 { 12180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12182 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 12183 # endif 12184 12185 d.vkDestroyCuModuleNVX( m_device, 12186 static_cast<VkCuModuleNVX>( module ), 12187 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12188 } 12189 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12190 12191 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12192 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 12193 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12194 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12195 { 12196 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12197 d.vkDestroyCuModuleNVX( 12198 static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12199 } 12200 12201 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 12202 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12203 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 12204 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12205 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12206 { 12207 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12208 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12209 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 12210 # endif 12211 12212 d.vkDestroyCuModuleNVX( m_device, 12213 static_cast<VkCuModuleNVX>( module ), 12214 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12215 } 12216 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12217 12218 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12219 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 12220 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12221 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12222 { 12223 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12224 d.vkDestroyCuFunctionNVX( 12225 static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12226 } 12227 12228 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12229 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12230 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( 
VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 12231 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12232 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12233 { 12234 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12236 VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 12237 # endif 12238 12239 d.vkDestroyCuFunctionNVX( m_device, 12240 static_cast<VkCuFunctionNVX>( function ), 12241 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12242 } 12243 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12244 12245 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12246 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 12247 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12248 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12249 { 12250 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12251 d.vkDestroyCuFunctionNVX( 12252 static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12253 } 12254 12255 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12256 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12257 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 12258 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12259 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12260 { 12261 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12262 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12263 
VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 12264 # endif 12265 12266 d.vkDestroyCuFunctionNVX( m_device, 12267 static_cast<VkCuFunctionNVX>( function ), 12268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12269 } 12270 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12271 12272 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,Dispatch const & d) const12273 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, 12274 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12275 { 12276 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12277 d.vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); 12278 } 12279 12280 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12281 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,Dispatch const & d) const12282 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, 12283 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12284 { 12285 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12286 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12287 VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" ); 12288 # endif 12289 12290 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); 12291 } 12292 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12293 12294 //=== VK_NVX_image_view_handle === 12295 12296 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,Dispatch const & d) const12297 
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, 12298 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12299 { 12300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12301 return d.vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); 12302 } 12303 12304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12305 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,Dispatch const & d) const12306 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, 12307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12308 { 12309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12310 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12311 VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" ); 12312 # endif 12313 12314 uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); 12315 12316 return result; 12317 } 12318 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12319 12320 template <typename Dispatch> getImageViewHandle64NVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,Dispatch const & d) const12321 VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, 12322 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12323 { 12324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12325 return d.vkGetImageViewHandle64NVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); 12326 } 12327 12328 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12329 template <typename Dispatch> getImageViewHandle64NVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,Dispatch 
const & d) const12330 VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, 12331 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12332 { 12333 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12334 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12335 VULKAN_HPP_ASSERT( d.vkGetImageViewHandle64NVX && "Function <vkGetImageViewHandle64NVX> requires <VK_NVX_image_view_handle>" ); 12336 # endif 12337 12338 uint64_t result = d.vkGetImageViewHandle64NVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); 12339 12340 return result; 12341 } 12342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12343 12344 template <typename Dispatch> getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,Dispatch const & d) const12345 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, 12346 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, 12347 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12348 { 12349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12350 return static_cast<Result>( d.vkGetImageViewAddressNVX( 12351 static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); 12352 } 12353 12354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12355 template <typename Dispatch> 12356 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,Dispatch const & d) const12357 Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const 12358 { 12359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12360 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 
12361 VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" ); 12362 # endif 12363 12364 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; 12365 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12366 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) ); 12367 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); 12368 12369 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 12370 } 12371 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12372 12373 //=== VK_AMD_draw_indirect_count === 12374 12375 template <typename Dispatch> drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const12376 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 12377 VULKAN_HPP_NAMESPACE::DeviceSize offset, 12378 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 12379 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 12380 uint32_t maxDrawCount, 12381 uint32_t stride, 12382 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12383 { 12384 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12385 d.vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), 12386 static_cast<VkBuffer>( buffer ), 12387 static_cast<VkDeviceSize>( offset ), 12388 static_cast<VkBuffer>( countBuffer ), 12389 static_cast<VkDeviceSize>( countBufferOffset ), 12390 maxDrawCount, 12391 stride ); 12392 } 12393 12394 template <typename Dispatch> drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize 
offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const12395 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 12396 VULKAN_HPP_NAMESPACE::DeviceSize offset, 12397 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 12398 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 12399 uint32_t maxDrawCount, 12400 uint32_t stride, 12401 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12402 { 12403 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12404 d.vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), 12405 static_cast<VkBuffer>( buffer ), 12406 static_cast<VkDeviceSize>( offset ), 12407 static_cast<VkBuffer>( countBuffer ), 12408 static_cast<VkDeviceSize>( countBufferOffset ), 12409 maxDrawCount, 12410 stride ); 12411 } 12412 12413 //=== VK_AMD_shader_info === 12414 12415 template <typename Dispatch> getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,size_t * pInfoSize,void * pInfo,Dispatch const & d) const12416 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12417 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12418 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12419 size_t * pInfoSize, 12420 void * pInfo, 12421 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12422 { 12423 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12424 return static_cast<Result>( d.vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ), 12425 static_cast<VkPipeline>( pipeline ), 12426 static_cast<VkShaderStageFlagBits>( shaderStage ), 12427 static_cast<VkShaderInfoTypeAMD>( infoType ), 12428 pInfoSize, 12429 pInfo ) ); 12430 } 12431 12432 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12433 template 
<typename Uint8_tAllocator, 12434 typename Dispatch, 12435 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 12436 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Dispatch const & d) const12437 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12438 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12439 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12440 Dispatch const & d ) const 12441 { 12442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12443 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12444 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 12445 # endif 12446 12447 std::vector<uint8_t, Uint8_tAllocator> info; 12448 size_t infoSize; 12449 VULKAN_HPP_NAMESPACE::Result result; 12450 do 12451 { 12452 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12453 static_cast<VkPipeline>( pipeline ), 12454 static_cast<VkShaderStageFlagBits>( shaderStage ), 12455 static_cast<VkShaderInfoTypeAMD>( infoType ), 12456 &infoSize, 12457 nullptr ) ); 12458 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 12459 { 12460 info.resize( infoSize ); 12461 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12462 static_cast<VkPipeline>( pipeline ), 12463 static_cast<VkShaderStageFlagBits>( shaderStage ), 12464 static_cast<VkShaderInfoTypeAMD>( infoType ), 12465 &infoSize, 12466 reinterpret_cast<void *>( info.data() ) ) ); 12467 } 12468 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12469 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 12470 
VULKAN_HPP_ASSERT( infoSize <= info.size() ); 12471 if ( infoSize < info.size() ) 12472 { 12473 info.resize( infoSize ); 12474 } 12475 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 12476 } 12477 12478 template <typename Uint8_tAllocator, 12479 typename Dispatch, 12480 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 12481 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const12482 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 12483 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 12484 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 12485 Uint8_tAllocator & uint8_tAllocator, 12486 Dispatch const & d ) const 12487 { 12488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12489 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12490 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 12491 # endif 12492 12493 std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); 12494 size_t infoSize; 12495 VULKAN_HPP_NAMESPACE::Result result; 12496 do 12497 { 12498 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12499 static_cast<VkPipeline>( pipeline ), 12500 static_cast<VkShaderStageFlagBits>( shaderStage ), 12501 static_cast<VkShaderInfoTypeAMD>( infoType ), 12502 &infoSize, 12503 nullptr ) ); 12504 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 12505 { 12506 info.resize( infoSize ); 12507 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 12508 static_cast<VkPipeline>( pipeline ), 12509 
static_cast<VkShaderStageFlagBits>( shaderStage ), 12510 static_cast<VkShaderInfoTypeAMD>( infoType ), 12511 &infoSize, 12512 reinterpret_cast<void *>( info.data() ) ) ); 12513 } 12514 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12515 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 12516 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 12517 if ( infoSize < info.size() ) 12518 { 12519 info.resize( infoSize ); 12520 } 12521 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 12522 } 12523 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12524 12525 //=== VK_KHR_dynamic_rendering === 12526 12527 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const12528 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 12529 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12530 { 12531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12532 d.vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 12533 } 12534 12535 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12536 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const12537 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 12538 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12539 { 12540 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12541 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12542 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 12543 # endif 12544 12545 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const 
VkRenderingInfo *>( &renderingInfo ) ); 12546 } 12547 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12548 12549 template <typename Dispatch> endRenderingKHR(Dispatch const & d) const12550 VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12551 { 12552 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12553 d.vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 12554 } 12555 12556 #if defined( VK_USE_PLATFORM_GGP ) 12557 //=== VK_GGP_stream_descriptor_surface === 12558 12559 template <typename Dispatch> 12560 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const12561 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, 12562 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12563 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 12564 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12565 { 12566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12567 return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( m_instance ), 12568 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), 12569 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12570 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 12571 } 12572 12573 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12574 template <typename Dispatch> 12575 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & 
d) const12576 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 12577 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12578 Dispatch const & d ) const 12579 { 12580 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12581 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12582 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 12583 # endif 12584 12585 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12586 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 12587 m_instance, 12588 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 12589 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12590 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12591 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); 12592 12593 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 12594 } 12595 12596 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12597 template <typename Dispatch> 12598 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12599 Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 12600 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12601 Dispatch const & d ) const 12602 { 12603 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12604 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12605 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 12606 # endif 12607 12608 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12609 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 12610 m_instance, 12611 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 12612 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12613 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12614 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); 12615 12616 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12617 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 12618 } 12619 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12620 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12621 #endif /*VK_USE_PLATFORM_GGP*/ 12622 12623 //=== VK_NV_external_memory_capabilities === 12624 12625 template <typename Dispatch> 12626 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,Dispatch const & d) const12627 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 12628 VULKAN_HPP_NAMESPACE::ImageType type, 12629 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, 12630 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 12631 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 12632 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 12633 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, 12634 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12635 { 12636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12637 return static_cast<Result>( 12638 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), 12639 static_cast<VkFormat>( format ), 12640 static_cast<VkImageType>( type ), 12641 static_cast<VkImageTiling>( tiling ), 12642 static_cast<VkImageUsageFlags>( usage ), 12643 static_cast<VkImageCreateFlags>( flags ), 12644 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 12645 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) ); 12646 } 12647 12648 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12649 template <typename Dispatch> 12650 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,Dispatch const & d) const12651 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 12652 VULKAN_HPP_NAMESPACE::ImageType type, 12653 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 12654 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 12655 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 12656 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 12657 Dispatch const & d ) const 12658 { 12659 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12660 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12661 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && 12662 "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" ); 12663 # endif 12664 12665 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; 12666 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12667 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, 12668 static_cast<VkFormat>( format ), 12669 static_cast<VkImageType>( type ), 12670 static_cast<VkImageTiling>( tiling ), 12671 static_cast<VkImageUsageFlags>( usage ), 12672 static_cast<VkImageCreateFlags>( flags ), 12673 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 12674 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) ); 12675 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); 12676 12677 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); 12678 } 12679 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12680 12681 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 12682 //=== VK_NV_external_memory_win32 === 12683 12684 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,HANDLE * pHandle,Dispatch const & d) const12685 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 12686 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, 12687 HANDLE * pHandle, 12688 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12689 { 12690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12691 return static_cast<Result>( 
d.vkGetMemoryWin32HandleNV( 12692 static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); 12693 } 12694 12695 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12696 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,Dispatch const & d) const12697 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( 12698 VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const 12699 { 12700 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12701 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12702 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" ); 12703 # endif 12704 12705 HANDLE handle; 12706 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12707 d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) ); 12708 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); 12709 12710 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 12711 } 12712 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12713 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 12714 12715 //=== VK_KHR_get_physical_device_properties2 === 12716 12717 template <typename Dispatch> getFeatures2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const12718 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, 12719 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12720 { 12721 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12722 d.vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 12723 } 12724 12725 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12726 template <typename Dispatch> 12727 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const & d) const12728 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12729 { 12730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12731 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12732 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 12733 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12734 # endif 12735 12736 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 12737 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 12738 12739 return features; 12740 } 12741 12742 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12743 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const & d) const12744 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12745 { 12746 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12747 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12748 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 12749 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12750 # endif 12751 12752 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12753 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 12754 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 12755 12756 return structureChain; 12757 } 12758 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12759 12760 template <typename Dispatch> getProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const12761 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 12762 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12763 { 12764 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12765 d.vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 12766 } 12767 12768 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12769 template <typename Dispatch> 12770 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const & d) const12771 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12772 { 12773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12774 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12775 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 12776 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12777 # endif 12778 12779 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 12780 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 12781 12782 return properties; 12783 } 12784 12785 template <typename X, typename Y, typename... Z, typename Dispatch> 12786 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const & d) const12787 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12788 { 12789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12791 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 12792 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12793 # endif 12794 12795 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12796 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 12797 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 12798 12799 return structureChain; 12800 } 12801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12802 12803 template <typename Dispatch> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const12804 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 12805 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 12806 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12807 { 
12808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12809 d.vkGetPhysicalDeviceFormatProperties2KHR( 12810 static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 12811 } 12812 12813 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12814 template <typename Dispatch> 12815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const12816 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12817 { 12818 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12819 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12820 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 12821 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12822 # endif 12823 12824 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 12825 d.vkGetPhysicalDeviceFormatProperties2KHR( 12826 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 12827 12828 return formatProperties; 12829 } 12830 12831 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12832 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const12833 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12834 { 12835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12836 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12837 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 12838 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12839 # endif 12840 12841 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12842 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 12843 d.vkGetPhysicalDeviceFormatProperties2KHR( 12844 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 12845 12846 return structureChain; 12847 } 12848 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12849 12850 template <typename Dispatch> 12851 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const12852 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 12853 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 12854 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12855 { 12856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12857 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 12858 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( 
pImageFormatInfo ), 12859 reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 12860 } 12861 12862 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12863 template <typename Dispatch> 12864 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const12865 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 12866 { 12867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12868 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12869 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 12870 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12871 # endif 12872 12873 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 12874 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12875 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 12876 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 12877 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 12878 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 12879 12880 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 12881 } 12882 12883 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12884 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const12885 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 12886 { 12887 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12888 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12889 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 12890 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12891 # endif 12892 12893 StructureChain<X, Y, Z...> structureChain; 12894 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 12895 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12896 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 12897 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 12898 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 12899 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 12900 12901 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 12902 } 12903 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12904 12905 template <typename Dispatch> getQueueFamilyProperties2KHR(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const12906 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, 12907 
VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 12908 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12909 { 12910 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12911 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12912 static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 12913 } 12914 12915 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12916 template < 12917 typename QueueFamilyProperties2Allocator, 12918 typename Dispatch, 12919 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 12920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12921 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12922 { 12923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12924 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12925 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12926 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12927 # endif 12928 12929 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 12930 uint32_t queueFamilyPropertyCount; 12931 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12932 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12933 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12934 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12935 12936 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12937 if ( 
queueFamilyPropertyCount < queueFamilyProperties.size() ) 12938 { 12939 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12940 } 12941 return queueFamilyProperties; 12942 } 12943 12944 template < 12945 typename QueueFamilyProperties2Allocator, 12946 typename Dispatch, 12947 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 12948 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const12949 PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 12950 { 12951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12952 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12953 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12954 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12955 # endif 12956 12957 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 12958 uint32_t queueFamilyPropertyCount; 12959 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12960 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12961 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12962 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12963 12964 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12965 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12966 { 12967 queueFamilyProperties.resize( queueFamilyPropertyCount ); 
12968 } 12969 return queueFamilyProperties; 12970 } 12971 12972 template <typename StructureChain, 12973 typename StructureChainAllocator, 12974 typename Dispatch, 12975 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 12976 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12977 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12978 { 12979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12980 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12981 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12982 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12983 # endif 12984 12985 std::vector<StructureChain, StructureChainAllocator> structureChains; 12986 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 12987 uint32_t queueFamilyPropertyCount; 12988 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12989 structureChains.resize( queueFamilyPropertyCount ); 12990 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12991 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12992 { 12993 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 12994 } 12995 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12996 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12997 12998 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12999 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 13000 { 13001 structureChains.resize( queueFamilyPropertyCount ); 13002 } 13003 for ( uint32_t i = 0; i < 
queueFamilyPropertyCount; i++ ) 13004 { 13005 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 13006 } 13007 return structureChains; 13008 } 13009 13010 template <typename StructureChain, 13011 typename StructureChainAllocator, 13012 typename Dispatch, 13013 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 13014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(StructureChainAllocator & structureChainAllocator,Dispatch const & d) const13015 PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 13016 { 13017 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13018 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13019 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 13020 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 13021 # endif 13022 13023 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 13024 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 13025 uint32_t queueFamilyPropertyCount; 13026 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 13027 structureChains.resize( queueFamilyPropertyCount ); 13028 queueFamilyProperties.resize( queueFamilyPropertyCount ); 13029 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 13030 { 13031 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 13032 } 13033 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 13034 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 13035 
13036 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 13037 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 13038 { 13039 structureChains.resize( queueFamilyPropertyCount ); 13040 } 13041 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 13042 { 13043 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 13044 } 13045 return structureChains; 13046 } 13047 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13048 13049 template <typename Dispatch> getMemoryProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const13050 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 13051 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13052 { 13053 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13054 d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 13055 reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 13056 } 13057 13058 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13059 template <typename Dispatch> 13060 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const & d) const13061 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13062 { 13063 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13064 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13065 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 13066 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 13067 # endif 13068 13069 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 13070 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, 
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 13071 13072 return memoryProperties; 13073 } 13074 13075 template <typename X, typename Y, typename... Z, typename Dispatch> 13076 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const & d) const13077 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13078 { 13079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13080 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13081 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 13082 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 13083 # endif 13084 13085 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 13086 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 13087 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 13088 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 13089 13090 return structureChain; 13091 } 13092 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13093 13094 template <typename Dispatch> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const13095 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 13096 uint32_t * pPropertyCount, 13097 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 13098 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13099 { 13100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13101 
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 13102 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 13103 pPropertyCount, 13104 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 13105 } 13106 13107 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13108 template < 13109 typename SparseImageFormatProperties2Allocator, 13110 typename Dispatch, 13111 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 13112 int>::type> 13113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const13114 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 13115 Dispatch const & d ) const 13116 { 13117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13118 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13119 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 13120 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 13121 # endif 13122 13123 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 13124 uint32_t propertyCount; 13125 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 13126 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 13127 properties.resize( propertyCount ); 13128 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 13129 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( 
&formatInfo ), 13130 &propertyCount, 13131 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 13132 13133 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13134 if ( propertyCount < properties.size() ) 13135 { 13136 properties.resize( propertyCount ); 13137 } 13138 return properties; 13139 } 13140 13141 template < 13142 typename SparseImageFormatProperties2Allocator, 13143 typename Dispatch, 13144 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 13145 int>::type> 13146 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const13147 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 13148 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 13149 Dispatch const & d ) const 13150 { 13151 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13152 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13153 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 13154 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 13155 # endif 13156 13157 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 13158 uint32_t propertyCount; 13159 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 13160 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 13161 
properties.resize( propertyCount ); 13162 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 13163 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 13164 &propertyCount, 13165 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 13166 13167 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13168 if ( propertyCount < properties.size() ) 13169 { 13170 properties.resize( propertyCount ); 13171 } 13172 return properties; 13173 } 13174 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13175 13176 //=== VK_KHR_device_group === 13177 13178 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const13179 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, 13180 uint32_t localDeviceIndex, 13181 uint32_t remoteDeviceIndex, 13182 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 13183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13184 { 13185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13186 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 13187 static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 13188 } 13189 13190 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13191 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const13192 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( 13193 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13194 { 13195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13196 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13197 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && 13198 "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); 13199 # endif 13200 13201 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 13202 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 13203 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); 13204 13205 return peerMemoryFeatures; 13206 } 13207 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13208 13209 template <typename Dispatch> setDeviceMaskKHR(uint32_t deviceMask,Dispatch const & d) const13210 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13211 { 13212 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13213 d.vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask ); 13214 } 13215 13216 template <typename Dispatch> dispatchBaseKHR(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const13217 VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, 13218 uint32_t baseGroupY, 13219 uint32_t baseGroupZ, 13220 uint32_t groupCountX, 13221 uint32_t groupCountY, 13222 uint32_t groupCountZ, 13223 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13224 { 13225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13226 d.vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 13227 } 13228 13229 #if defined( VK_USE_PLATFORM_VI_NN ) 13230 //=== VK_NN_vi_surface === 13231 13232 template <typename Dispatch> createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const13233 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, 13234 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13235 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 13236 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13237 { 13238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13239 return static_cast<Result>( d.vkCreateViSurfaceNN( static_cast<VkInstance>( m_instance ), 13240 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), 13241 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13242 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 13243 } 13244 13245 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13246 template <typename Dispatch> 13247 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13248 Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 13249 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13250 Dispatch const & d ) const 13251 { 13252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13254 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 13255 # endif 13256 13257 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 13258 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13259 d.vkCreateViSurfaceNN( m_instance, 13260 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 13261 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( 
allocator ) ), 13262 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 13263 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); 13264 13265 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 13266 } 13267 13268 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13269 template <typename Dispatch> 13270 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13271 Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 13272 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13273 Dispatch const & d ) const 13274 { 13275 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13276 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13277 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 13278 # endif 13279 13280 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 13281 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13282 d.vkCreateViSurfaceNN( m_instance, 13283 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 13284 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13285 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 13286 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); 13287 13288 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13289 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 13290 } 13291 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 13292 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13293 #endif /*VK_USE_PLATFORM_VI_NN*/ 13294 13295 //=== VK_KHR_maintenance1 === 13296 13297 template <typename Dispatch> trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const13298 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 13299 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 13300 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13301 { 13302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13303 d.vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 13304 } 13305 13306 //=== VK_KHR_device_group_creation === 13307 13308 template <typename Dispatch> 13309 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroupsKHR(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const13310 Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, 13311 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 13312 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13313 { 13314 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13315 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), 13316 pPhysicalDeviceGroupCount, 13317 reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 13318 } 13319 13320 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13321 template <typename PhysicalDeviceGroupPropertiesAllocator, 13322 typename Dispatch, 13323 typename std::enable_if< 13324 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, 
VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 13325 int>::type> 13326 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13327 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const & d) const13328 Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const 13329 { 13330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13332 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 13333 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 13334 # endif 13335 13336 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 13337 uint32_t physicalDeviceGroupCount; 13338 VULKAN_HPP_NAMESPACE::Result result; 13339 do 13340 { 13341 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 13342 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 13343 { 13344 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 13345 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 13346 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 13347 } 13348 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13349 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 13350 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 13351 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 13352 { 13353 physicalDeviceGroupProperties.resize( 
physicalDeviceGroupCount ); 13354 } 13355 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 13356 } 13357 13358 template <typename PhysicalDeviceGroupPropertiesAllocator, 13359 typename Dispatch, 13360 typename std::enable_if< 13361 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 13362 int>::type> 13363 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13364 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const13365 Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 13366 { 13367 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13368 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13369 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 13370 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 13371 # endif 13372 13373 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 13374 physicalDeviceGroupPropertiesAllocator ); 13375 uint32_t physicalDeviceGroupCount; 13376 VULKAN_HPP_NAMESPACE::Result result; 13377 do 13378 { 13379 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 13380 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 13381 { 13382 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 13383 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 13384 m_instance, 
&physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 13385 } 13386 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13387 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 13388 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 13389 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 13390 { 13391 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 13392 } 13393 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 13394 } 13395 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13396 13397 //=== VK_KHR_external_memory_capabilities === 13398 13399 template <typename Dispatch> getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const13400 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 13401 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 13402 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13403 { 13404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13405 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 13406 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 13407 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 13408 } 13409 13410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13411 template <typename Dispatch> 13412 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const13413 PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 13414 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13415 { 13416 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13417 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13418 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && 13419 "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 13420 # endif 13421 13422 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 13423 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 13424 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 13425 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 13426 13427 return externalBufferProperties; 13428 } 13429 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13430 13431 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 13432 //=== VK_KHR_external_memory_win32 === 13433 13434 template <typename Dispatch> getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const13435 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, 13436 HANDLE * pHandle, 13437 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13438 { 13439 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13440 return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( 13441 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 13442 } 13443 13444 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13445 template <typename Dispatch> 13446 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const13447 Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 13448 { 13449 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13450 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13451 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" ); 13452 # endif 13453 13454 HANDLE handle; 13455 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13456 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 13457 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); 13458 13459 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 13460 } 13461 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13462 13463 template <typename Dispatch> 13464 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,Dispatch const & d) const13465 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 13466 HANDLE handle, 13467 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, 13468 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13469 { 13470 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13471 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ), 13472 
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 13473 handle, 13474 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); 13475 } 13476 13477 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13478 template <typename Dispatch> 13479 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,Dispatch const & d) const13480 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const 13481 { 13482 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13483 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13484 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function <vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" ); 13485 # endif 13486 13487 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; 13488 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13489 d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 13490 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 13491 handle, 13492 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) ); 13493 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); 13494 13495 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); 13496 } 13497 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13498 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 13499 13500 //=== VK_KHR_external_memory_fd === 13501 13502 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const13503 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, 13504 int * pFd, 13505 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13506 { 13507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13508 return static_cast<Result>( d.vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 13509 } 13510 13511 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13512 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,Dispatch const & d) const13513 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, 13514 Dispatch const & d ) const 13515 { 13516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13517 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13518 VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" ); 13519 # endif 13520 13521 int fd; 13522 VULKAN_HPP_NAMESPACE::Result result = 13523 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 13524 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); 13525 13526 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 13527 } 13528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13529 13530 template <typename Dispatch> getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,Dispatch const & d) const13531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 13532 int fd, 13533 
VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, 13534 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13535 { 13536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13537 return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ), 13538 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 13539 fd, 13540 reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); 13541 } 13542 13543 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13544 template <typename Dispatch> 13545 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,Dispatch const & d) const13546 Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const 13547 { 13548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13550 VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" ); 13551 # endif 13552 13553 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; 13554 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR( 13555 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); 13556 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); 13557 13558 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); 13559 } 13560 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13561 13562 //=== VK_KHR_external_semaphore_capabilities === 13563 13564 template <typename Dispatch> 13565 VULKAN_HPP_INLINE 
void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const13566 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 13567 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 13568 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13569 { 13570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13571 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 13572 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 13573 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 13574 } 13575 13576 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13577 template <typename Dispatch> 13578 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const13579 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 13580 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13581 { 13582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13583 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13584 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && 13585 "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 13586 # endif 13587 13588 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 13589 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 13590 
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 13591 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 13592 13593 return externalSemaphoreProperties; 13594 } 13595 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13596 13597 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 13598 //=== VK_KHR_external_semaphore_win32 === 13599 13600 template <typename Dispatch> importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,Dispatch const & d) const13601 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( 13602 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13603 { 13604 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13605 return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( 13606 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); 13607 } 13608 13609 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13610 template <typename Dispatch> 13611 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,Dispatch const & d) const13612 Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 13613 Dispatch const & d ) const 13614 { 13615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13617 VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 13618 # endif 13619 13620 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 13621 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) ); 13622 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); 13623 13624 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13625 } 13626 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13627 13628 template <typename Dispatch> getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const13629 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( 13630 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13631 { 13632 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13633 return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( 13634 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 13635 } 13636 13637 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13638 template <typename Dispatch> 13639 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const13640 Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 13641 { 13642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13644 VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 13645 # endif 13646 13647 HANDLE handle; 13648 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13649 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 13650 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); 13651 13652 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 13653 } 13654 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13655 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 13656 13657 //=== VK_KHR_external_semaphore_fd === 13658 13659 template <typename Dispatch> importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,Dispatch const & d) const13660 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, 13661 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13662 { 13663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13664 return static_cast<Result>( 13665 d.vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); 13666 } 13667 13668 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13669 template <typename Dispatch> 13670 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,Dispatch const & d) const13671 Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const 13672 { 13673 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13674 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13675 VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 13676 # endif 
13677 13678 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13679 d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); 13680 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); 13681 13682 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13683 } 13684 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13685 13686 template <typename Dispatch> getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const13687 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, 13688 int * pFd, 13689 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13690 { 13691 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13692 return static_cast<Result>( 13693 d.vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 13694 } 13695 13696 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13697 template <typename Dispatch> 13698 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,Dispatch const & d) const13699 Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const 13700 { 13701 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13702 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13703 VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 13704 # endif 13705 13706 int fd; 13707 VULKAN_HPP_NAMESPACE::Result result = 13708 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd 
) ); 13709 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); 13710 13711 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 13712 } 13713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13714 13715 //=== VK_KHR_push_descriptor === 13716 13717 template <typename Dispatch> pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,Dispatch const & d) const13718 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 13719 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13720 uint32_t set, 13721 uint32_t descriptorWriteCount, 13722 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 13723 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13724 { 13725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13726 d.vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 13727 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 13728 static_cast<VkPipelineLayout>( layout ), 13729 set, 13730 descriptorWriteCount, 13731 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); 13732 } 13733 13734 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13735 template <typename Dispatch> 13736 VULKAN_HPP_INLINE void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,Dispatch const & d) const13737 CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 13738 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13739 uint32_t set, 13740 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 13741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13742 { 13743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13744 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13745 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 13746 # endif 13747 13748 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 13749 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 13750 static_cast<VkPipelineLayout>( layout ), 13751 set, 13752 descriptorWrites.size(), 13753 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); 13754 } 13755 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13756 13757 template <typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,const void * pData,Dispatch const & d) const13758 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13759 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13760 uint32_t set, 13761 const void * pData, 13762 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13763 { 13764 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13765 d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 13766 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13767 static_cast<VkPipelineLayout>( layout ), 13768 set, 13769 pData ); 13770 } 13771 13772 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13773 template <typename DataType, typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,DataType const & data,Dispatch const & d) const13774 VULKAN_HPP_INLINE void 
CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13775 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 13776 uint32_t set, 13777 DataType const & data, 13778 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13779 { 13780 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13781 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13782 VULKAN_HPP_ASSERT( 13783 d.vkCmdPushDescriptorSetWithTemplateKHR && 13784 "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); 13785 # endif 13786 13787 d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, 13788 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13789 static_cast<VkPipelineLayout>( layout ), 13790 set, 13791 reinterpret_cast<const void *>( &data ) ); 13792 } 13793 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13794 13795 //=== VK_EXT_conditional_rendering === 13796 13797 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,Dispatch const & d) const13798 VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, 13799 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13800 { 13801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13802 d.vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 13803 reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); 13804 } 13805 13806 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13807 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,Dispatch const & d) const13808 VULKAN_HPP_INLINE void 
CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, 13809 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13810 { 13811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13812 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13813 VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" ); 13814 # endif 13815 13816 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); 13817 } 13818 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13819 13820 template <typename Dispatch> endConditionalRenderingEXT(Dispatch const & d) const13821 VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13822 { 13823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13824 d.vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 13825 } 13826 13827 //=== VK_KHR_descriptor_update_template === 13828 13829 template <typename Dispatch> 13830 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const13831 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 13832 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13833 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 13834 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13835 { 13836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13837 return static_cast<Result>( 
d.vkCreateDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ), 13838 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 13839 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13840 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 13841 } 13842 13843 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13844 template <typename Dispatch> 13845 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13846 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 13847 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13848 Dispatch const & d ) const 13849 { 13850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13851 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13852 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 13853 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13854 # endif 13855 13856 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 13857 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 13858 m_device, 13859 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 13860 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13861 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 13862 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); 13863 13864 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); 13865 } 13866 13867 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13868 template <typename Dispatch> 13869 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13870 Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 13871 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13872 Dispatch const & d ) const 13873 { 13874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13875 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13876 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 13877 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13878 # endif 13879 13880 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 13881 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 13882 m_device, 13883 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 13884 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13885 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 13886 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); 13887 13888 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 13889 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 13890 descriptorUpdateTemplate, 
detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13891 } 13892 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13893 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13894 13895 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const13896 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13897 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13898 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13899 { 13900 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13901 d.vkDestroyDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ), 13902 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13903 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 13904 } 13905 13906 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13907 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13908 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13909 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13910 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13911 { 13912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13913 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13914 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && 13915 "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13916 # endif 13917 13918 d.vkDestroyDescriptorUpdateTemplateKHR( 13919 m_device, 13920 
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13921 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 13922 } 13923 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13924 13925 template <typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const13926 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 13927 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13928 const void * pData, 13929 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13930 { 13931 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13932 d.vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ), 13933 static_cast<VkDescriptorSet>( descriptorSet ), 13934 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13935 pData ); 13936 } 13937 13938 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13939 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const13940 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 13941 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 13942 DataType const & data, 13943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13944 { 13945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13946 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13947 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && 13948 "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> 
or <VK_VERSION_1_1>" ); 13949 # endif 13950 13951 d.vkUpdateDescriptorSetWithTemplateKHR( m_device, 13952 static_cast<VkDescriptorSet>( descriptorSet ), 13953 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13954 reinterpret_cast<const void *>( &data ) ); 13955 } 13956 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13957 13958 //=== VK_NV_clip_space_w_scaling === 13959 13960 template <typename Dispatch> setViewportWScalingNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,Dispatch const & d) const13961 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13962 uint32_t viewportCount, 13963 const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, 13964 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13965 { 13966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13967 d.vkCmdSetViewportWScalingNV( 13968 static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); 13969 } 13970 13971 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13972 template <typename Dispatch> 13973 VULKAN_HPP_INLINE void setViewportWScalingNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,Dispatch const & d) const13974 CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13975 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, 13976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13977 { 13978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13979 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13980 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" ); 13981 # endif 13982 13983 d.vkCmdSetViewportWScalingNV( 13984 m_commandBuffer, 
firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); 13985 } 13986 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13987 13988 //=== VK_EXT_direct_mode_display === 13989 13990 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 13991 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13992 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13993 { 13994 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13995 return static_cast<Result>( d.vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) ); 13996 } 13997 #else 13998 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13999 VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14000 { 14001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14003 VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function <vkReleaseDisplayEXT> requires <VK_EXT_direct_mode_display>" ); 14004 # endif 14005 14006 d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ); 14007 } 14008 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 14009 14010 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) 14011 //=== VK_EXT_acquire_xlib_display === 14012 14013 template <typename Dispatch> acquireXlibDisplayEXT(Display * dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const14014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, 14015 VULKAN_HPP_NAMESPACE::DisplayKHR display, 14016 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14017 { 14018 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14019 return static_cast<Result>( d.vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, static_cast<VkDisplayKHR>( display ) ) ); 14020 } 14021 14022 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14023 template <typename Dispatch> 14024 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireXlibDisplayEXT(Display & dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const14025 PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 14026 { 14027 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14028 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14029 VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 14030 # endif 14031 14032 VULKAN_HPP_NAMESPACE::Result result = 14033 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); 14034 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); 14035 14036 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14037 } 14038 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14039 14040 template <typename Dispatch> getRandROutputDisplayEXT(Display * dpy,RROutput rrOutput,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const14041 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, 14042 RROutput rrOutput, 14043 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 14044 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14045 { 14046 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14047 return static_cast<Result>( 14048 d.vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, 
rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 14049 } 14050 14051 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14052 template <typename Dispatch> 14053 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(Display & dpy,RROutput rrOutput,Dispatch const & d) const14054 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 14055 { 14056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14057 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14058 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 14059 # endif 14060 14061 VULKAN_HPP_NAMESPACE::DisplayKHR display; 14062 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14063 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 14064 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); 14065 14066 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 14067 } 14068 14069 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14070 template <typename Dispatch> 14071 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique(Display & dpy,RROutput rrOutput,Dispatch const & d) const14072 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 14073 { 14074 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14075 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14076 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 14077 # endif 14078 14079 
VULKAN_HPP_NAMESPACE::DisplayKHR display; 14080 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14081 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 14082 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); 14083 14084 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14085 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 14086 } 14087 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14088 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14089 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ 14090 14091 //=== VK_EXT_display_surface_counter === 14092 14093 template <typename Dispatch> 14094 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,Dispatch const & d) const14095 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 14096 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, 14097 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14098 { 14099 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14100 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), 14101 static_cast<VkSurfaceKHR>( surface ), 14102 reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); 14103 } 14104 14105 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14106 template <typename Dispatch> 14107 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const14108 PhysicalDevice::getSurfaceCapabilities2EXT( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 14109 { 14110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14111 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14112 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && 14113 "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" ); 14114 # endif 14115 14116 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; 14117 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 14118 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); 14119 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); 14120 14121 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 14122 } 14123 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14124 14125 //=== VK_EXT_display_control === 14126 14127 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,Dispatch const & d) const14128 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 14129 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, 14130 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14131 { 14132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14133 return static_cast<Result>( d.vkDisplayPowerControlEXT( 14134 static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); 14135 } 14136 14137 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14138 template <typename Dispatch> 
displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,Dispatch const & d) const14139 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 14140 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, 14141 Dispatch const & d ) const 14142 { 14143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14144 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14145 VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" ); 14146 # endif 14147 14148 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14149 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); 14150 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); 14151 14152 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14153 } 14154 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14155 14156 template <typename Dispatch> registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const14157 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, 14158 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14159 VULKAN_HPP_NAMESPACE::Fence * pFence, 14160 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14161 { 14162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14163 return static_cast<Result>( d.vkRegisterDeviceEventEXT( static_cast<VkDevice>( m_device ), 14164 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), 
14165 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14166 reinterpret_cast<VkFence *>( pFence ) ) ); 14167 } 14168 14169 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14170 template <typename Dispatch> 14171 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14172 Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 14173 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14174 Dispatch const & d ) const 14175 { 14176 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14177 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14178 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 14179 # endif 14180 14181 VULKAN_HPP_NAMESPACE::Fence fence; 14182 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 14183 m_device, 14184 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 14185 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14186 reinterpret_cast<VkFence *>( &fence ) ) ); 14187 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); 14188 14189 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 14190 } 14191 14192 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14193 template <typename Dispatch> 14194 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator,Dispatch const & d) const14195 Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 14196 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14197 Dispatch const & d ) const 14198 { 14199 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14200 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14201 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 14202 # endif 14203 14204 VULKAN_HPP_NAMESPACE::Fence fence; 14205 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 14206 m_device, 14207 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 14208 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14209 reinterpret_cast<VkFence *>( &fence ) ) ); 14210 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); 14211 14212 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14213 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 14214 } 14215 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14216 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14217 14218 template <typename Dispatch> registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const14219 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 14220 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, 14221 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14222 
VULKAN_HPP_NAMESPACE::Fence * pFence, 14223 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14224 { 14225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14226 return static_cast<Result>( d.vkRegisterDisplayEventEXT( static_cast<VkDevice>( m_device ), 14227 static_cast<VkDisplayKHR>( display ), 14228 reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), 14229 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14230 reinterpret_cast<VkFence *>( pFence ) ) ); 14231 } 14232 14233 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14234 template <typename Dispatch> 14235 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14236 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 14237 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 14238 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14239 Dispatch const & d ) const 14240 { 14241 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14242 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14243 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 14244 # endif 14245 14246 VULKAN_HPP_NAMESPACE::Fence fence; 14247 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 14248 m_device, 14249 static_cast<VkDisplayKHR>( display ), 14250 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 14251 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14252 reinterpret_cast<VkFence *>( &fence ) ) ); 14253 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); 14254 14255 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 14256 } 14257 14258 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14259 template <typename Dispatch> 14260 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14261 Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 14262 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 14263 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14264 Dispatch const & d ) const 14265 { 14266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14267 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14268 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 14269 # endif 14270 14271 VULKAN_HPP_NAMESPACE::Fence fence; 14272 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 14273 m_device, 14274 static_cast<VkDisplayKHR>( display ), 14275 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 14276 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14277 reinterpret_cast<VkFence *>( &fence ) ) ); 14278 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); 14279 14280 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14281 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, 
detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 14282 } 14283 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14284 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14285 14286 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const14287 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14288 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 14289 uint64_t * pCounterValue, 14290 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14291 { 14292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14293 return static_cast<Result>( d.vkGetSwapchainCounterEXT( 14294 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); 14295 } 14296 14297 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14298 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const14299 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( 14300 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const 14301 { 14302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14303 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14304 VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" ); 14305 # endif 14306 14307 uint64_t counterValue; 14308 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14309 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( 
counter ), &counterValue ) ); 14310 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); 14311 14312 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); 14313 } 14314 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14315 14316 //=== VK_GOOGLE_display_timing === 14317 14318 template <typename Dispatch> 14319 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,Dispatch const & d) const14320 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14321 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, 14322 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14323 { 14324 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14325 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ), 14326 static_cast<VkSwapchainKHR>( swapchain ), 14327 reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); 14328 } 14329 14330 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14331 template <typename Dispatch> 14332 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14333 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 14334 { 14335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14336 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14337 VULKAN_HPP_ASSERT( d.vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" ); 14338 # endif 14339 14340 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE 
displayTimingProperties; 14341 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRefreshCycleDurationGOOGLE( 14342 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); 14343 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); 14344 14345 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); 14346 } 14347 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14348 14349 template <typename Dispatch> 14350 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pPresentationTimingCount,VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,Dispatch const & d) const14351 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14352 uint32_t * pPresentationTimingCount, 14353 VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, 14354 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14355 { 14356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14357 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), 14358 static_cast<VkSwapchainKHR>( swapchain ), 14359 pPresentationTimingCount, 14360 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); 14361 } 14362 14363 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14364 template < 14365 typename PastPresentationTimingGOOGLEAllocator, 14366 typename Dispatch, 14367 typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, 14368 int>::type> 14369 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14370 typename 
ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14371 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 14372 { 14373 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14374 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14375 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 14376 # endif 14377 14378 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; 14379 uint32_t presentationTimingCount; 14380 VULKAN_HPP_NAMESPACE::Result result; 14381 do 14382 { 14383 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14384 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 14385 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 14386 { 14387 presentationTimings.resize( presentationTimingCount ); 14388 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14389 d.vkGetPastPresentationTimingGOOGLE( m_device, 14390 static_cast<VkSwapchainKHR>( swapchain ), 14391 &presentationTimingCount, 14392 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 14393 } 14394 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14395 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 14396 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 14397 if ( presentationTimingCount < presentationTimings.size() ) 14398 { 14399 presentationTimings.resize( presentationTimingCount ); 14400 } 14401 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 14402 } 14403 14404 template < 14405 typename PastPresentationTimingGOOGLEAllocator, 14406 typename Dispatch, 14407 typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, 14408 int>::type> 14409 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 14410 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,Dispatch const & d) const14411 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14412 PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, 14413 Dispatch const & d ) const 14414 { 14415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14416 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14417 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 14418 # endif 14419 14420 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( 14421 pastPresentationTimingGOOGLEAllocator ); 14422 uint32_t presentationTimingCount; 14423 VULKAN_HPP_NAMESPACE::Result result; 14424 do 14425 { 14426 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14427 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 14428 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 14429 { 14430 presentationTimings.resize( presentationTimingCount ); 14431 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14432 
d.vkGetPastPresentationTimingGOOGLE( m_device, 14433 static_cast<VkSwapchainKHR>( swapchain ), 14434 &presentationTimingCount, 14435 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 14436 } 14437 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14438 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 14439 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 14440 if ( presentationTimingCount < presentationTimings.size() ) 14441 { 14442 presentationTimings.resize( presentationTimingCount ); 14443 } 14444 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 14445 } 14446 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14447 14448 //=== VK_EXT_discard_rectangles === 14449 14450 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,Dispatch const & d) const14451 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 14452 uint32_t discardRectangleCount, 14453 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, 14454 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14455 { 14456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14457 d.vkCmdSetDiscardRectangleEXT( 14458 static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); 14459 } 14460 14461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14462 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,Dispatch const & d) const14463 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 14464 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, 14465 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14466 { 14467 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14468 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14469 VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" ); 14470 # endif 14471 14472 d.vkCmdSetDiscardRectangleEXT( 14473 m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); 14474 } 14475 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14476 14477 template <typename Dispatch> setDiscardRectangleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,Dispatch const & d) const14478 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, 14479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14480 { 14481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14482 d.vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) ); 14483 } 14484 14485 template <typename Dispatch> setDiscardRectangleModeEXT(VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,Dispatch const & d) const14486 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, 14487 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14488 { 14489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14490 d.vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) ); 14491 } 14492 14493 //=== VK_EXT_hdr_metadata === 14494 14495 template <typename Dispatch> setHdrMetadataEXT(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainKHR 
* pSwapchains,const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,Dispatch const & d) const14496 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, 14497 const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 14498 const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, 14499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14500 { 14501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14502 d.vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ), 14503 swapchainCount, 14504 reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), 14505 reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); 14506 } 14507 14508 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14509 template <typename Dispatch> setHdrMetadataEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,Dispatch const & d) const14510 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, 14511 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, 14512 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 14513 { 14514 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14515 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14516 VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" ); 14517 # endif 14518 # ifdef VULKAN_HPP_NO_EXCEPTIONS 14519 VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); 14520 # else 14521 if ( swapchains.size() != metadata.size() ) 14522 { 14523 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); 14524 } 14525 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 14526 14527 d.vkSetHdrMetadataEXT( m_device, 14528 swapchains.size(), 14529 
reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), 14530 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); 14531 } 14532 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14533 14534 //=== VK_KHR_create_renderpass2 === 14535 14536 template <typename Dispatch> createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const14537 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 14538 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14539 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 14540 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14541 { 14542 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14543 return static_cast<Result>( d.vkCreateRenderPass2KHR( static_cast<VkDevice>( m_device ), 14544 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 14545 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14546 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 14547 } 14548 14549 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14550 template <typename Dispatch> 14551 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14552 Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 14553 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14554 Dispatch const & d ) const 14555 { 14556 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14557 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14558 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function 
<vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14559 # endif 14560 14561 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 14562 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14563 d.vkCreateRenderPass2KHR( m_device, 14564 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 14565 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14566 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 14567 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); 14568 14569 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 14570 } 14571 14572 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14573 template <typename Dispatch> 14574 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14575 Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 14576 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14577 Dispatch const & d ) const 14578 { 14579 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14580 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14581 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14582 # endif 14583 14584 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 14585 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14586 d.vkCreateRenderPass2KHR( m_device, 14587 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 14588 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14589 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 14590 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); 14591 14592 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14593 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 14594 } 14595 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14596 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14597 14598 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const14599 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 14600 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 14601 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14602 { 14603 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14604 d.vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 14605 reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), 14606 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 14607 } 14608 14609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14610 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const14611 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 14612 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 14613 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14614 { 14615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 14616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14617 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14618 # endif 14619 14620 d.vkCmdBeginRenderPass2KHR( 14621 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 14622 } 14623 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14624 14625 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const14626 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 14627 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 14628 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14629 { 14630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14631 d.vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 14632 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), 14633 reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 14634 } 14635 14636 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14637 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const14638 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 14639 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 14640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14641 { 14642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14644 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires 
<VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14645 # endif 14646 14647 d.vkCmdNextSubpass2KHR( 14648 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 14649 } 14650 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14651 14652 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const14653 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 14654 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14655 { 14656 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14657 d.vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 14658 } 14659 14660 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14661 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const14662 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 14663 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14664 { 14665 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14666 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14667 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 14668 # endif 14669 14670 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 14671 } 14672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14673 14674 //=== VK_KHR_shared_presentable_image === 14675 14676 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 14677 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14678 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14679 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14680 { 14681 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14682 return static_cast<Result>( d.vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); 14683 } 14684 #else 14685 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const14686 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 14687 Dispatch const & d ) const 14688 { 14689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14690 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14691 VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" ); 14692 # endif 14693 14694 VULKAN_HPP_NAMESPACE::Result result = 14695 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 14696 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 14697 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", 14698 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 14699 14700 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14701 } 14702 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 14703 14704 //=== VK_KHR_external_fence_capabilities === 14705 14706 template <typename Dispatch> getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const14707 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 14708 
VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 14709 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14710 { 14711 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14712 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 14713 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 14714 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 14715 } 14716 14717 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14718 template <typename Dispatch> 14719 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const14720 PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 14721 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14722 { 14723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14724 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14725 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR && 14726 "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 14727 # endif 14728 14729 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 14730 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 14731 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 14732 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 14733 14734 return externalFenceProperties; 14735 } 14736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14737 14738 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 14739 //=== VK_KHR_external_fence_win32 === 14740 14741 template <typename Dispatch> importFenceWin32HandleKHR(const 
VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,Dispatch const & d) const14742 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( 14743 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14744 { 14745 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14746 return static_cast<Result>( d.vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), 14747 reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); 14748 } 14749 14750 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14751 template <typename Dispatch> 14752 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,Dispatch const & d) const14753 Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const 14754 { 14755 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14756 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14757 VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 14758 # endif 14759 14760 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14761 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); 14762 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); 14763 14764 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14765 } 14766 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14767 14768 template <typename Dispatch> getFenceWin32HandleKHR(const 
VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const14769 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, 14770 HANDLE * pHandle, 14771 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14772 { 14773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14774 return static_cast<Result>( 14775 d.vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 14776 } 14777 14778 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14779 template <typename Dispatch> 14780 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const14781 Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 14782 { 14783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14784 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14785 VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 14786 # endif 14787 14788 HANDLE handle; 14789 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14790 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 14791 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); 14792 14793 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 14794 } 14795 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14796 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 14797 14798 //=== VK_KHR_external_fence_fd === 14799 14800 
// POSIX fd based external-fence wrappers, followed by VK_KHR_performance_query wrappers.
  // Imports an external fence payload from a POSIX file descriptor.
  // Pointer-parameter overload: raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: result-checked, void payload.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Exports a fence payload as a POSIX file descriptor (written through pFd).
  // Pointer-parameter overload: raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
                                                                       int *            pFd,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the fd by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
                                                                                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
#  endif

    int                          fd;
    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_performance_query ===

  // Enumerates the performance counters (and their descriptions) available for a
  // queue family. Pointer-parameter overload: classic two-call count/fill protocol
  // driven by the caller; raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t                                                 queueFamilyIndex,
                                                                     uint32_t *                                               pCounterCount,
                                                                     VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *            pCounters,
                                                                     VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                         queueFamilyIndex,
                                                                         pCounterCount,
                                                                         reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
                                                                         reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: runs the count/fill loop internally (retrying on eIncomplete,
  // which can occur if the counter set changes between the two calls) and returns a
  // pair of equally-sized vectors: counters and their matching descriptions.
  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value &&
                std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
                       "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
#  endif

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_;
    // References into data_ so both vectors are filled in place and moved out together.
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters            = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t                     counterCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // First call: query the count only (null output arrays).
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        // Second call: fill both arrays.
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    // Shrink if the final count came back smaller than the allocated size.
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }

  // Allocator-taking variant of the above: identical logic, but the two result
  // vectors are constructed from caller-supplied allocators (piecewise pair init).
  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value &&
                std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t                                   queueFamilyIndex,
                                                                     PerformanceCounterKHRAllocator &           performanceCounterKHRAllocator,
                                                                     PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
                                                                     Dispatch const &                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
                       "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
#  endif

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_(
        std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters            = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t                     counterCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Queries how many passes are needed to execute a performance query pool with the
  // given create-info. Pointer-parameter overload; the driver call returns void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
                                                             uint32_t *                                                      pNumPasses,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                               reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
                                                               pNumPasses );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the pass count by value; noexcept since the driver
  // call cannot fail.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
    const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
                       "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" );
#  endif

    uint32_t numPasses;
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );

    return numPasses;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Acquires the device-wide profiling lock needed before recording performance
  // query command buffers. Pointer-parameter overload: raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: result-checked, void payload.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Releases the profiling lock; the driver call returns void, so there is only
  // this single overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
  }

//=== VK_KHR_get_surface_capabilities2 ===

  // Queries surface capabilities via the extensible *2KHR entry point.
  // Pointer-parameter overload: raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *             pSurfaceCapabilities,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                              reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                              reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the capabilities struct by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) );
  }

  // StructureChain overload: fills a caller-chosen pNext chain whose head must be
  // SurfaceCapabilities2KHR, so extension structs chained behind it get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    StructureChain<X, Y, Z...>                      structureChain;
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Enumerates surface formats via the *2KHR entry point.
  // Pointer-parameter overload: caller drives the count/fill protocol; raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                                                       uint32_t *                                  pSurfaceFormatCount,
                                                                                       VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR *   pSurfaceFormats,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                         reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                         pSurfaceFormatCount,
                                                                         reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: runs the count/fill loop internally (retrying on eIncomplete)
  // and returns the formats as a vector.
  template <typename SurfaceFormat2KHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t                                                                         surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    do
    {
      // Count-only call, then fill call; retried while the driver reports eIncomplete.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
  }

  // Allocator-taking variant of the vector overload: identical logic, vector built
  // from a caller-supplied allocator.
  template <typename SurfaceFormat2KHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           SurfaceFormat2KHRAllocator &                                surfaceFormat2KHRAllocator,
                                           Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
    uint32_t                                                                         surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
  }

  // StructureChain overload: each element of the result is a caller-chosen pNext
  // chain; the pNext pointers of the temporary SurfaceFormat2KHR array are wired to
  // the chains before the fill call, then the head structs are copied back.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    std::vector<StructureChain, StructureChainAllocator>  structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>  surfaceFormats;
    uint32_t                                              surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                          result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        // Point each temporary's pNext at the matching chain so extension structs fill.
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    // Copy the filled head structs back into the chains.
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) );
  }

  // Allocator-taking variant of the StructureChain overload: identical logic, chain
  // vector built from a caller-supplied allocator.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           StructureChainAllocator &                                   structureChainAllocator,
                                           Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
#  endif

    std::vector<StructureChain, StructureChainAllocator>  structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>  surfaceFormats;
    uint32_t                                              surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                          result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //===
//=== VK_KHR_get_display_properties2 ===

  // Enumerates display properties via the extensible *2KHR entry point.
  // Pointer-parameter overload: caller drives the count/fill protocol; raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t *                                    pPropertyCount,
                                                                                          VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
      static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: runs the count/fill loop internally (retrying on eIncomplete)
  // and returns the properties as a vector.
  template <
    typename DisplayProperties2KHRAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
    uint32_t                                                                                 propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                             result;
    do
    {
      // Count-only call, then fill call; retried while the driver reports eIncomplete.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Allocator-taking variant: identical logic, vector built from a caller-supplied
  // allocator.
  template <
    typename DisplayProperties2KHRAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
    uint32_t                                                                                 propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                             result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Enumerates display-plane properties via the *2KHR entry point.
  // Pointer-parameter overload: raw Result, no checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
                                                                                               VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
                                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
      static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <
    typename
DisplayPlaneProperties2KHRAllocator, 15396 typename Dispatch, 15397 typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, 15398 int>::type> 15399 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 15400 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(Dispatch const & d) const15401 PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const 15402 { 15403 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15404 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15405 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && 15406 "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15407 # endif 15408 15409 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; 15410 uint32_t propertyCount; 15411 VULKAN_HPP_NAMESPACE::Result result; 15412 do 15413 { 15414 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 15415 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15416 { 15417 properties.resize( propertyCount ); 15418 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 15419 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 15420 } 15421 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15422 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 15423 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15424 if ( propertyCount < properties.size() ) 15425 { 15426 properties.resize( propertyCount ); 15427 } 
15428 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15429 } 15430 15431 template < 15432 typename DisplayPlaneProperties2KHRAllocator, 15433 typename Dispatch, 15434 typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, 15435 int>::type> 15436 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 15437 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,Dispatch const & d) const15438 PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const 15439 { 15440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15441 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15442 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && 15443 "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15444 # endif 15445 15446 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator ); 15447 uint32_t propertyCount; 15448 VULKAN_HPP_NAMESPACE::Result result; 15449 do 15450 { 15451 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 15452 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15453 { 15454 properties.resize( propertyCount ); 15455 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 15456 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 15457 } 15458 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15459 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 15460 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15461 if ( propertyCount < properties.size() ) 15462 { 15463 properties.resize( propertyCount ); 15464 } 15465 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15466 } 15467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15468 15469 template <typename Dispatch> getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,Dispatch const & d) const15470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 15471 uint32_t * pPropertyCount, 15472 VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, 15473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15474 { 15475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15476 return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 15477 static_cast<VkDisplayKHR>( display ), 15478 pPropertyCount, 15479 reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) ); 15480 } 15481 15482 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15483 template < 15484 typename DisplayModeProperties2KHRAllocator, 15485 typename Dispatch, 15486 typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, 15487 int>::type> 15488 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 15489 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const15490 
PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 15491 { 15492 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15493 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15494 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15495 # endif 15496 15497 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties; 15498 uint32_t propertyCount; 15499 VULKAN_HPP_NAMESPACE::Result result; 15500 do 15501 { 15502 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15503 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15504 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15505 { 15506 properties.resize( propertyCount ); 15507 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 15508 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15509 } 15510 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15511 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15512 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15513 if ( propertyCount < properties.size() ) 15514 { 15515 properties.resize( propertyCount ); 15516 } 15517 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15518 } 15519 15520 template < 15521 typename DisplayModeProperties2KHRAllocator, 15522 typename Dispatch, 15523 typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, 15524 int>::type> 15525 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
15526 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,Dispatch const & d) const15527 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 15528 DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, 15529 Dispatch const & d ) const 15530 { 15531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15533 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15534 # endif 15535 15536 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator ); 15537 uint32_t propertyCount; 15538 VULKAN_HPP_NAMESPACE::Result result; 15539 do 15540 { 15541 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15542 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15543 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15544 { 15545 properties.resize( propertyCount ); 15546 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 15547 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15548 } 15549 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15550 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15551 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15552 if ( propertyCount < properties.size() ) 15553 { 15554 properties.resize( propertyCount ); 15555 } 
15556 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15557 } 15558 15559 template <typename StructureChain, 15560 typename StructureChainAllocator, 15561 typename Dispatch, 15562 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 15563 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const15564 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 15565 { 15566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15567 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15568 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15569 # endif 15570 15571 std::vector<StructureChain, StructureChainAllocator> structureChains; 15572 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties; 15573 uint32_t propertyCount; 15574 VULKAN_HPP_NAMESPACE::Result result; 15575 do 15576 { 15577 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15578 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15579 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15580 { 15581 structureChains.resize( propertyCount ); 15582 properties.resize( propertyCount ); 15583 for ( uint32_t i = 0; i < propertyCount; i++ ) 15584 { 15585 properties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>().pNext; 15586 } 15587 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 15588 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, 
reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15589 } 15590 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15591 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15592 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15593 if ( propertyCount < properties.size() ) 15594 { 15595 structureChains.resize( propertyCount ); 15596 } 15597 for ( uint32_t i = 0; i < propertyCount; i++ ) 15598 { 15599 structureChains[i].template get<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>() = properties[i]; 15600 } 15601 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 15602 } 15603 15604 template <typename StructureChain, 15605 typename StructureChainAllocator, 15606 typename Dispatch, 15607 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 15608 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,StructureChainAllocator & structureChainAllocator,Dispatch const & d) const15609 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 15610 StructureChainAllocator & structureChainAllocator, 15611 Dispatch const & d ) const 15612 { 15613 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15614 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15615 VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); 15616 # endif 15617 15618 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 15619 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties; 15620 uint32_t propertyCount; 15621 VULKAN_HPP_NAMESPACE::Result 
result; 15622 do 15623 { 15624 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15625 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 15626 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 15627 { 15628 structureChains.resize( propertyCount ); 15629 properties.resize( propertyCount ); 15630 for ( uint32_t i = 0; i < propertyCount; i++ ) 15631 { 15632 properties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>().pNext; 15633 } 15634 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 15635 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 15636 } 15637 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15638 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 15639 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 15640 if ( propertyCount < properties.size() ) 15641 { 15642 structureChains.resize( propertyCount ); 15643 } 15644 for ( uint32_t i = 0; i < propertyCount; i++ ) 15645 { 15646 structureChains[i].template get<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>() = properties[i]; 15647 } 15648 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); 15649 } 15650 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15651 15652 template <typename Dispatch> 15653 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,Dispatch const & d) const15654 PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, 15655 
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, 15656 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15657 { 15658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15659 return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 15660 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), 15661 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) ); 15662 } 15663 15664 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15665 template <typename Dispatch> 15666 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo,Dispatch const & d) const15667 PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const 15668 { 15669 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15670 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15671 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" ); 15672 # endif 15673 15674 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; 15675 VULKAN_HPP_NAMESPACE::Result result = 15676 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 15677 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), 15678 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) ); 15679 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); 15680 15681 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); 15682 } 15683 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15684 15685 #if defined( 
VK_USE_PLATFORM_IOS_MVK ) 15686 //=== VK_MVK_ios_surface === 15687 15688 template <typename Dispatch> createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const15689 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, 15690 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15691 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 15692 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15693 { 15694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15695 return static_cast<Result>( d.vkCreateIOSSurfaceMVK( static_cast<VkInstance>( m_instance ), 15696 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), 15697 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15698 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 15699 } 15700 15701 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15702 template <typename Dispatch> 15703 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15704 Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 15705 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15706 Dispatch const & d ) const 15707 { 15708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15710 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 15711 # endif 15712 15713 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15714 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 15715 d.vkCreateIOSSurfaceMVK( m_instance, 15716 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 15717 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15718 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15719 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); 15720 15721 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 15722 } 15723 15724 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15725 template <typename Dispatch> 15726 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15727 Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 15728 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15729 Dispatch const & d ) const 15730 { 15731 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15732 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15733 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 15734 # endif 15735 15736 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15737 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15738 d.vkCreateIOSSurfaceMVK( m_instance, 15739 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 15740 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15741 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15742 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); 15743 15744 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15745 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 15746 } 15747 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15748 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15749 #endif /*VK_USE_PLATFORM_IOS_MVK*/ 15750 15751 #if defined( VK_USE_PLATFORM_MACOS_MVK ) 15752 //=== VK_MVK_macos_surface === 15753 15754 template <typename Dispatch> createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const15755 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, 15756 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15757 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 15758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15759 { 15760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15761 return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( m_instance ), 15762 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), 15763 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15764 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 15765 } 15766 15767 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15768 template <typename Dispatch> 15769 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15770 Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 
15771 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15772 Dispatch const & d ) const 15773 { 15774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15775 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15776 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 15777 # endif 15778 15779 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15780 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15781 d.vkCreateMacOSSurfaceMVK( m_instance, 15782 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 15783 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15784 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15785 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); 15786 15787 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 15788 } 15789 15790 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15791 template <typename Dispatch> 15792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15793 Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 15794 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15795 Dispatch const & d ) const 15796 { 15797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15798 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15799 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 15800 # endif 15801 15802 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 15803 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15804 d.vkCreateMacOSSurfaceMVK( m_instance, 15805 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 15806 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15807 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 15808 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); 15809 15810 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15811 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 15812 } 15813 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15814 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15815 #endif /*VK_USE_PLATFORM_MACOS_MVK*/ 15816 15817 //=== VK_EXT_debug_utils === 15818 15819 template <typename Dispatch> setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,Dispatch const & d) const15820 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, 15821 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15822 { 15823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15824 return static_cast<Result>( 15825 d.vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) ); 15826 } 15827 15828 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15829 template <typename Dispatch> 15830 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo,Dispatch const & d) const15831 Device::setDebugUtilsObjectNameEXT( const 
VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 15832 { 15833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15834 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15835 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" ); 15836 # endif 15837 15838 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15839 d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) ); 15840 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); 15841 15842 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 15843 } 15844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15845 15846 template <typename Dispatch> setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,Dispatch const & d) const15847 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, 15848 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15849 { 15850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15851 return static_cast<Result>( 15852 d.vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) ); 15853 } 15854 15855 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15856 template <typename Dispatch> 15857 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo,Dispatch const & d) const15858 Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 15859 { 15860 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 15861 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15862 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" ); 15863 # endif 15864 15865 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15866 d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) ); 15867 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); 15868 15869 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 15870 } 15871 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15872 15873 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15874 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15876 { 15877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15878 d.vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15879 } 15880 15881 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15882 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15883 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15884 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15885 { 15886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15887 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15888 VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15889 # endif 15890 15891 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const 
VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15892 } 15893 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15894 15895 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const15896 VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15897 { 15898 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15899 d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) ); 15900 } 15901 15902 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15903 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15905 { 15906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15907 d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15908 } 15909 15910 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15911 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15912 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15913 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15914 { 15915 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15916 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15917 VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15918 # endif 15919 15920 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15921 } 15922 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15923 15924 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) 
const15925 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15926 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15927 { 15928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15929 d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15930 } 15931 15932 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15933 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15934 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15935 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15936 { 15937 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15938 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15939 VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15940 # endif 15941 15942 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15943 } 15944 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15945 15946 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const15947 VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15948 { 15949 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15950 d.vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 15951 } 15952 15953 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const15954 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 15955 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15956 { 15957 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15958 d.vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 15959 } 15960 15961 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15962 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const15963 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 15964 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15965 { 15966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15967 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15968 VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 15969 # endif 15970 15971 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 15972 } 15973 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15974 15975 template <typename Dispatch> 15976 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,Dispatch const & d) const15977 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, 15978 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15979 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, 15980 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15981 { 15982 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15983 return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), 15984 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), 
15985 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15986 reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) ); 15987 } 15988 15989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15990 template <typename Dispatch> 15991 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15992 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 15993 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15994 Dispatch const & d ) const 15995 { 15996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15997 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15998 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15999 # endif 16000 16001 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 16002 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT( 16003 m_instance, 16004 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 16005 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16006 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 16007 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); 16008 16009 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); 16010 } 16011 16012 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16013 template <typename Dispatch> 16014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16015 Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 16016 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16017 Dispatch const & d ) const 16018 { 16019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16020 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16021 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 16022 # endif 16023 16024 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 16025 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT( 16026 m_instance, 16027 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 16028 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16029 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 16030 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); 16031 16032 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 16033 result, 16034 UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 16035 } 16036 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 16037 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16038 16039 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const 
& d) const16040 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 16041 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16043 { 16044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16045 d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), 16046 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 16047 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16048 } 16049 16050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16051 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16052 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 16053 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16054 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16055 { 16056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16057 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16058 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 16059 # endif 16060 16061 d.vkDestroyDebugUtilsMessengerEXT( 16062 m_instance, 16063 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 16064 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16065 } 16066 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16067 16068 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16069 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 16070 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16071 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16072 { 16073 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16074 d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), 16075 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 16076 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16077 } 16078 16079 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16080 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16081 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 16082 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16083 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16084 { 16085 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16086 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16087 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 16088 # endif 16089 16090 d.vkDestroyDebugUtilsMessengerEXT( 16091 m_instance, 16092 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 16093 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16094 } 16095 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16096 16097 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,Dispatch const & d) const16098 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 16099 
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 16100 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, 16101 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16102 { 16103 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16104 d.vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ), 16105 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 16106 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 16107 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) ); 16108 } 16109 16110 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16111 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,Dispatch const & d) const16112 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 16113 VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 16114 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, 16115 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16116 { 16117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16118 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16119 VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" ); 16120 # endif 16121 16122 d.vkSubmitDebugUtilsMessageEXT( m_instance, 16123 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 16124 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 16125 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) ); 16126 } 16127 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16128 16129 #if defined( 
VK_USE_PLATFORM_ANDROID_KHR ) 16130 //=== VK_ANDROID_external_memory_android_hardware_buffer === 16131 16132 template <typename Dispatch> 16133 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer * buffer,VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,Dispatch const & d) const16134 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, 16135 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, 16136 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16137 { 16138 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16139 return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( 16140 static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) ); 16141 } 16142 16143 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16144 template <typename Dispatch> 16145 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const16146 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 16147 { 16148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16149 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16150 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 16151 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 16152 # endif 16153 16154 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; 16155 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16156 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, 
reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 16157 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 16158 16159 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 16160 } 16161 16162 template <typename X, typename Y, typename... Z, typename Dispatch> 16163 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const16164 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 16165 { 16166 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16167 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16168 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 16169 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 16170 # endif 16171 16172 StructureChain<X, Y, Z...> structureChain; 16173 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = 16174 structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>(); 16175 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16176 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 16177 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 16178 16179 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 16180 } 16181 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16182 16183 template <typename Dispatch> 16184 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer,Dispatch const & d) const16185 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, 16186 struct AHardwareBuffer ** pBuffer, 16187 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16188 { 16189 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16190 return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( 16191 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) ); 16192 } 16193 16194 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16195 template <typename Dispatch> 16196 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info,Dispatch const & d) const16197 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const 16198 { 16199 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16200 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16201 VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && 16202 "Function <vkGetMemoryAndroidHardwareBufferANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 16203 # endif 16204 16205 struct AHardwareBuffer * buffer; 16206 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16207 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) ); 16208 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); 16209 16210 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 16211 } 16212 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16213 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 16214 16215 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 16216 //=== VK_AMDX_shader_enqueue === 16217 16218 template <typename Dispatch> 16219 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const16220 Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16221 uint32_t createInfoCount, 16222 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, 16223 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16224 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 16225 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16226 { 16227 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16228 return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( m_device ), 16229 static_cast<VkPipelineCache>( pipelineCache ), 16230 createInfoCount, 16231 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ), 16232 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16233 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 16234 } 16235 16236 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16237 template <typename PipelineAllocator, 16238 typename Dispatch, 16239 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16240 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 16241 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16242 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 16243 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16244 Dispatch const & d ) const 16245 { 16246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16247 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16248 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16249 # endif 16250 16251 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 16252 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16253 m_device, 16254 static_cast<VkPipelineCache>( pipelineCache ), 16255 createInfos.size(), 16256 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 16257 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16258 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16259 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16260 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 16261 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16262 16263 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 16264 } 16265 16266 template <typename PipelineAllocator, 16267 typename 
Dispatch, 16268 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const16269 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 16270 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16271 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 16272 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16273 PipelineAllocator & pipelineAllocator, 16274 Dispatch const & d ) const 16275 { 16276 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16277 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16278 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16279 # endif 16280 16281 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 16282 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16283 m_device, 16284 static_cast<VkPipelineCache>( pipelineCache ), 16285 createInfos.size(), 16286 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 16287 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16288 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16289 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16290 
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 16291 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16292 16293 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 16294 } 16295 16296 template <typename Dispatch> 16297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createExecutionGraphPipelineAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16298 Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16299 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 16300 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16301 Dispatch const & d ) const 16302 { 16303 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16304 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16305 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16306 # endif 16307 16308 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 16309 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16310 m_device, 16311 static_cast<VkPipelineCache>( pipelineCache ), 16312 1, 16313 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 16314 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16315 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 16316 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16317 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", 16318 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16319 16320 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 16321 } 16322 16323 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16324 template < 16325 typename Dispatch, 16326 typename PipelineAllocator, 16327 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 16328 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16329 Device::createExecutionGraphPipelinesAMDXUnique( 16330 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16331 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 16332 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16333 Dispatch const & d ) const 16334 { 16335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16336 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16337 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16338 # endif 16339 16340 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 16341 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16342 m_device, 16343 static_cast<VkPipelineCache>( pipelineCache ), 16344 createInfos.size(), 16345 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 16346 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16347 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16348 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16349 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 16350 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16351 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 16352 uniquePipelines.reserve( createInfos.size() ); 16353 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 16354 for ( auto const & pipeline : pipelines ) 16355 { 16356 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 16357 } 16358 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 16359 } 16360 16361 template < 16362 typename Dispatch, 16363 typename PipelineAllocator, 16364 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 16365 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const16366 Device::createExecutionGraphPipelinesAMDXUnique( 16367 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16368 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 16369 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16370 PipelineAllocator & pipelineAllocator, 16371 Dispatch const & d ) const 16372 { 16373 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16374 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16375 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16376 # endif 16377 16378 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 16379 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16380 m_device, 16381 static_cast<VkPipelineCache>( pipelineCache ), 16382 createInfos.size(), 16383 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 16384 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16385 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16386 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16387 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 16388 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16389 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 16390 uniquePipelines.reserve( createInfos.size() ); 16391 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 16392 for ( auto const & pipeline : pipelines ) 16393 { 16394 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 16395 } 16396 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 16397 } 16398 16399 template <typename Dispatch> 16400 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16401 Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16402 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 16403 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16404 Dispatch const & d ) const 16405 { 16406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16407 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16408 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 16409 # endif 16410 16411 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 16412 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 16413 m_device, 16414 static_cast<VkPipelineCache>( pipelineCache ), 16415 1, 16416 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 16417 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16418 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 16419 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16420 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", 16421 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16422 16423 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 16424 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 16425 } 16426 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 16427 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16428 16429 template <typename Dispatch> 16430 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,Dispatch const & d) const16431 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 16432 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, 16433 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16434 { 16435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16436 return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast<VkDevice>( m_device ), 16437 static_cast<VkPipeline>( executionGraph ), 16438 reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) ); 16439 } 16440 16441 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16442 template <typename Dispatch> 16443 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,Dispatch const & d) const16444 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const 16445 { 16446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16447 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16448 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && 16449 "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" ); 16450 # endif 16451 16452 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; 16453 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 16454 m_device, static_cast<VkPipeline>( executionGraph ), 
reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) ); 16455 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); 16456 16457 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); 16458 } 16459 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16460 16461 template <typename Dispatch> 16462 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,uint32_t * pNodeIndex,Dispatch const & d) const16463 Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 16464 const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, 16465 uint32_t * pNodeIndex, 16466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16467 { 16468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16469 return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ), 16470 static_cast<VkPipeline>( executionGraph ), 16471 reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), 16472 pNodeIndex ) ); 16473 } 16474 16475 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16476 template <typename Dispatch> getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,Dispatch const & d) const16477 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX( 16478 VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const 16479 { 16480 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16481 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16482 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" ); 16483 # endif 16484 16485 uint32_t nodeIndex; 16486 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 16487 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) ); 16488 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); 16489 16490 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); 16491 } 16492 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16493 16494 template <typename Dispatch> initializeGraphScratchMemoryAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,Dispatch const & d) const16495 VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 16496 VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16497 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16498 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16499 { 16500 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16501 d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), 16502 static_cast<VkPipeline>( executionGraph ), 16503 static_cast<VkDeviceAddress>( scratch ), 16504 static_cast<VkDeviceSize>( scratchSize ) ); 16505 } 16506 16507 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const16508 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( 
VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16509 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16510 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 16511 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16512 { 16513 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16514 d.vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), 16515 static_cast<VkDeviceAddress>( scratch ), 16516 static_cast<VkDeviceSize>( scratchSize ), 16517 reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 16518 } 16519 16520 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16521 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const16522 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16523 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16524 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 16525 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16526 { 16527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16528 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16529 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" ); 16530 # endif 16531 16532 d.vkCmdDispatchGraphAMDX( m_commandBuffer, 16533 static_cast<VkDeviceAddress>( scratch ), 16534 static_cast<VkDeviceSize>( scratchSize ), 16535 reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 16536 } 16537 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16538 16539 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const16540 VULKAN_HPP_INLINE void 
CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16541 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16542 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 16543 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16544 { 16545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16546 d.vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), 16547 static_cast<VkDeviceAddress>( scratch ), 16548 static_cast<VkDeviceSize>( scratchSize ), 16549 reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 16550 } 16551 16552 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16553 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const16554 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16555 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16556 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 16557 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16558 { 16559 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16560 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16561 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" ); 16562 # endif 16563 16564 d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer, 16565 static_cast<VkDeviceAddress>( scratch ), 16566 static_cast<VkDeviceSize>( scratchSize ), 16567 reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 16568 } 16569 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16570 16571 template <typename Dispatch> dispatchGraphIndirectCountAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,VULKAN_HPP_NAMESPACE::DeviceAddress 
countInfo,Dispatch const & d) const16572 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 16573 VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, 16574 VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, 16575 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16576 { 16577 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16578 d.vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), 16579 static_cast<VkDeviceAddress>( scratch ), 16580 static_cast<VkDeviceSize>( scratchSize ), 16581 static_cast<VkDeviceAddress>( countInfo ) ); 16582 } 16583 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 16584 16585 //=== VK_EXT_sample_locations === 16586 16587 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,Dispatch const & d) const16588 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, 16589 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16590 { 16591 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16592 d.vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) ); 16593 } 16594 16595 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16596 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,Dispatch const & d) const16597 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, 16598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16599 { 16600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16601 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16602 VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires 
<VK_EXT_sample_locations>" ); 16603 # endif 16604 16605 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) ); 16606 } 16607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16608 16609 template <typename Dispatch> getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,Dispatch const & d) const16610 VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 16611 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, 16612 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16613 { 16614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16615 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), 16616 static_cast<VkSampleCountFlagBits>( samples ), 16617 reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) ); 16618 } 16619 16620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16621 template <typename Dispatch> 16622 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,Dispatch const & d) const16623 PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16624 { 16625 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16626 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16627 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT && 16628 "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" ); 16629 # endif 16630 16631 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; 16632 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 16633 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples 
), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) ); 16634 16635 return multisampleProperties; 16636 } 16637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16638 16639 //=== VK_KHR_get_memory_requirements2 === 16640 16641 template <typename Dispatch> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const16642 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 16643 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 16644 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16645 { 16646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16647 d.vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), 16648 reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), 16649 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 16650 } 16651 16652 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16653 template <typename Dispatch> 16654 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const16655 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16656 { 16657 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16659 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 16660 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16661 # endif 16662 16663 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 16664 d.vkGetImageMemoryRequirements2KHR( 16665 m_device, reinterpret_cast<const 
VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16666 16667 return memoryRequirements; 16668 } 16669 16670 template <typename X, typename Y, typename... Z, typename Dispatch> 16671 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const16672 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16673 { 16674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16675 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16676 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 16677 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16678 # endif 16679 16680 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 16681 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 16682 d.vkGetImageMemoryRequirements2KHR( 16683 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16684 16685 return structureChain; 16686 } 16687 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16688 16689 template <typename Dispatch> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const16690 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 16691 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 16692 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16693 { 16694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 16695 d.vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), 16696 reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), 16697 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 16698 } 16699 16700 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16701 template <typename Dispatch> 16702 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const16703 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16704 { 16705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16707 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 16708 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16709 # endif 16710 16711 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 16712 d.vkGetBufferMemoryRequirements2KHR( 16713 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16714 16715 return memoryRequirements; 16716 } 16717 16718 template <typename X, typename Y, typename... 
Z, typename Dispatch> 16719 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const16720 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16721 { 16722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16723 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16724 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 16725 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16726 # endif 16727 16728 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 16729 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 16730 d.vkGetBufferMemoryRequirements2KHR( 16731 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 16732 16733 return structureChain; 16734 } 16735 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16736 16737 template <typename Dispatch> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const16738 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 16739 uint32_t * pSparseMemoryRequirementCount, 16740 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 16741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16742 { 16743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16744 
d.vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), 16745 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 16746 pSparseMemoryRequirementCount, 16747 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 16748 } 16749 16750 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16751 template <typename SparseImageMemoryRequirements2Allocator, 16752 typename Dispatch, 16753 typename std::enable_if< 16754 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 16755 int>::type> 16756 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const16757 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 16758 { 16759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16761 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 16762 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16763 # endif 16764 16765 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 16766 uint32_t sparseMemoryRequirementCount; 16767 d.vkGetImageSparseMemoryRequirements2KHR( 16768 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 16769 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16770 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 16771 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 16772 
&sparseMemoryRequirementCount, 16773 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 16774 16775 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 16776 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 16777 { 16778 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16779 } 16780 return sparseMemoryRequirements; 16781 } 16782 16783 template <typename SparseImageMemoryRequirements2Allocator, 16784 typename Dispatch, 16785 typename std::enable_if< 16786 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 16787 int>::type> 16788 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const16789 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 16790 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 16791 Dispatch const & d ) const 16792 { 16793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16795 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 16796 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 16797 # endif 16798 16799 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 16800 sparseImageMemoryRequirements2Allocator ); 16801 uint32_t sparseMemoryRequirementCount; 16802 d.vkGetImageSparseMemoryRequirements2KHR( 16803 m_device, 
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 16804 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16805 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 16806 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 16807 &sparseMemoryRequirementCount, 16808 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 16809 16810 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 16811 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 16812 { 16813 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 16814 } 16815 return sparseMemoryRequirements; 16816 } 16817 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16818 16819 //=== VK_KHR_acceleration_structure === 16820 16821 template <typename Dispatch> 16822 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,Dispatch const & d) const16823 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, 16824 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16825 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, 16826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16827 { 16828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16829 return static_cast<Result>( d.vkCreateAccelerationStructureKHR( static_cast<VkDevice>( m_device ), 16830 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), 16831 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16832 reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) ); 16833 } 16834 16835 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates an acceleration structure and returns the handle by value; errors are
  // routed through detail::resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
    Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                            Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    VULKAN_HPP_NAMESPACE::Result                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like createAccelerationStructureKHR above, but wraps the new handle in a UniqueHandle that destroys it
  // (using the same allocator and dispatcher) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
    Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                  Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    VULKAN_HPP_NAMESPACE::Result                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result,
                                                                UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>(
                                                                  accelerationStructure, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain (pointer-based) wrapper around vkDestroyAccelerationStructureKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
                                         static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: destroys the acceleration structure with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR            accelerationStructure,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload for AccelerationStructureKHR handles (pointer-based allocator);
  // forwards to vkDestroyAccelerationStructureKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
                                         static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for AccelerationStructureKHR handles (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR            accelerationStructure,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records vkCmdBuildAccelerationStructuresKHR into this command buffer (pointer-based).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::buildAccelerationStructuresKHR( uint32_t                                                                     infoCount,
                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
                                                   Dispatch const &                                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                           infoCount,
                                           reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
                                           reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking ArrayProxies; throws LogicError when the two arrays differ in length
  // (asserts instead when exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
#  endif
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
                                           infos.size(),
                                           reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
                                           reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Indirect (pointer-based) variant: per-build parameters are supplied via device addresses / strides.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t                                                                infoCount,
                                                                                const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
                                                                                const VULKAN_HPP_NAMESPACE::DeviceAddress *                             pIndirectDeviceAddresses,
                                                                                const uint32_t *                                                        pIndirectStrides,
                                                                                const uint32_t * const *                                                ppMaxPrimitiveCounts,
                                                                                Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                   infoCount,
                                                   reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
                                                   reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
                                                   pIndirectStrides,
                                                   ppMaxPrimitiveCounts );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced indirect overload; all four ArrayProxies must have the same length, otherwise a LogicError
  // is thrown (or an assert fires when exceptions are disabled).
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const &                             indirectDeviceAddresses,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const &                                                        indirectStrides,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const &                                                pMaxPrimitiveCounts,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR &&
                       "Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" );
#  endif
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
    VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
    VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
#  else
    if ( infos.size() != indirectDeviceAddresses.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
    }
    if ( infos.size() != indirectStrides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
    }
    if ( infos.size() != pMaxPrimitiveCounts.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
                                                   infos.size(),
                                                   reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
                                                   reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
                                                   indirectStrides.data(),
                                                   pMaxPrimitiveCounts.data() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Host-side build entry point (pointer-based): wraps vkBuildAccelerationStructuresKHR, optionally as a
  // deferred operation, and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                   deferredOperation,
                                            uint32_t                                                                     infoCount,
                                            const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
                                            const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
                                            Dispatch const &                                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ),
                                          static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                          infoCount,
                                          reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
                                          reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced host-side build overload; validates that infos and pBuildRangeInfos have equal length.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                                           deferredOperation,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
#  endif
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkBuildAccelerationStructuresKHR( m_device,
                                          static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                          infos.size(),
                                          reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
                                          reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
    // eOperationDeferredKHR / eOperationNotDeferredKHR are accepted as success codes here and
    // passed through to the caller unchanged.
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain (pointer-based) wrapper around vkCopyAccelerationStructureKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                     deferredOperation,
                                                                                      const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
                                                                  static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                                                  reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload; accepts the deferred-operation success codes (see resultCheck call below).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                     deferredOperation,
                                          const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
                                          Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT(
      d.vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureKHR(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain (pointer-based) wrapper around vkCopyAccelerationStructureToMemoryKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
                                                  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
                                                  Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
                                                                          static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                                                          reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload; accepts the deferred-operation success codes.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
                                                  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
                                                  Dispatch const &                                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR &&
                       "Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureToMemoryKHR(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain (pointer-based) wrapper around vkCopyMemoryToAccelerationStructureKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
                                                  const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
                                                  Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT(
      d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
                                                                          static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                                                          reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload; accepts the deferred-operation success codes.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
                                                  const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
                                                  Dispatch const &                                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR &&
                       "Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToAccelerationStructureKHR(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Plain (pointer-based) wrapper around vkWriteAccelerationStructuresPropertiesKHR: writes property
  // query results for the given acceleration structures into the caller-provided buffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::writeAccelerationStructuresPropertiesKHR( uint32_t                                               accelerationStructureCount,
                                                      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
                                                      VULKAN_HPP_NAMESPACE::QueryType                        queryType,
                                                      size_t                                                 dataSize,
                                                      void *                                                 pData,
                                                      size_t                                                 stride,
                                                      Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
                                                                              accelerationStructureCount,
                                                                              reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
                                                                              static_cast<VkQueryType>( queryType ),
                                                                              dataSize,
                                                                              pData,
                                                                              stride ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload returning a std::vector<DataType>; dataSize must be a multiple of sizeof( DataType )
  // (asserted in the body).
  template <typename DataType,
            typename DataTypeAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::writeAccelerationStructuresPropertiesKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                                                queryType,
      size_t                                                                                         dataSize,
      size_t                                                                                         stride,
      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR &&
                       "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    // dataSize must divide evenly into DataType elements.
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
                                                    accelerationStructures.size(),
                                                    reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    data.size() * sizeof( DataType ),
                                                    reinterpret_cast<void *>( data.data() ),
                                                    stride ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Convenience variant that returns a single DataType value instead of a vector.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                                                                queryType,
    size_t                                                                                         stride,
    Dispatch const &                                                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR &&
                       "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
                                                    accelerationStructures.size(),
                                                    reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    sizeof( DataType ),
                                                    reinterpret_cast<void *>( &data ),
                                                    stride ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records vkCmdCopyAccelerationStructureKHR (pointer-based).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                         reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced (reference-based) overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT(
      d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records vkCmdCopyAccelerationStructureToMemoryKHR (pointer-based).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
                                                                              Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                 reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced (reference-based) overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
                                                                              Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR &&
                       "Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records vkCmdCopyMemoryToAccelerationStructureKHR (pointer-based).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
                                                                              Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                 reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced (reference-based) overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
                                                                              Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR &&
                       "Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
#  endif

    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Wraps vkGetAccelerationStructureDeviceAddressKHR (pointer-based).
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress
Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, 17357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17358 { 17359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17360 return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( 17361 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) ); 17362 } 17363 17364 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17365 template <typename Dispatch> 17366 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,Dispatch const & d) const17367 Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, 17368 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17369 { 17370 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17371 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17372 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && 17373 "Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" ); 17374 # endif 17375 17376 VkDeviceAddress result = 17377 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) ); 17378 17379 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 17380 } 17381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17382 17383 template <typename Dispatch> 17384 VULKAN_HPP_INLINE void writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const17385 
CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 17386 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 17387 VULKAN_HPP_NAMESPACE::QueryType queryType, 17388 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 17389 uint32_t firstQuery, 17390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17391 { 17392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17393 d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 17394 accelerationStructureCount, 17395 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 17396 static_cast<VkQueryType>( queryType ), 17397 static_cast<VkQueryPool>( queryPool ), 17398 firstQuery ); 17399 } 17400 17401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17402 template <typename Dispatch> writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const17403 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( 17404 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 17405 VULKAN_HPP_NAMESPACE::QueryType queryType, 17406 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 17407 uint32_t firstQuery, 17408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17409 { 17410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17411 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17412 VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && 17413 "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 17414 # endif 17415 17416 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, 17417 accelerationStructures.size(), 
17418 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 17419 static_cast<VkQueryType>( queryType ), 17420 static_cast<VkQueryPool>( queryPool ), 17421 firstQuery ); 17422 } 17423 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17424 17425 template <typename Dispatch> getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const17426 VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, 17427 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 17428 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17429 { 17430 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17431 d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ), 17432 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), 17433 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); 17434 } 17435 17436 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17437 template <typename Dispatch> 17438 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,Dispatch const & d) const17439 Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, 17440 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17441 { 17442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17443 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17444 VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && 17445 "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires 
<VK_KHR_acceleration_structure>" ); 17446 # endif 17447 17448 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 17449 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, 17450 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), 17451 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 17452 17453 return compatibility; 17454 } 17455 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17456 17457 template <typename Dispatch> getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,const uint32_t * pMaxPrimitiveCounts,VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,Dispatch const & d) const17458 VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 17459 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, 17460 const uint32_t * pMaxPrimitiveCounts, 17461 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, 17462 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17463 { 17464 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17465 d.vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ), 17466 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 17467 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), 17468 pMaxPrimitiveCounts, 17469 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); 17470 } 17471 17472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17473 template <typename Dispatch> 17474 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,Dispatch const & d) const17475 Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 17476 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, 17477 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts, 17478 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 17479 { 17480 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17481 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17482 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && 17483 "Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" ); 17484 # endif 17485 # ifdef VULKAN_HPP_NO_EXCEPTIONS 17486 VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); 17487 # else 17488 if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) 17489 { 17490 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); 17491 } 17492 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 17493 17494 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; 17495 d.vkGetAccelerationStructureBuildSizesKHR( m_device, 17496 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 17497 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), 17498 maxPrimitiveCounts.data(), 17499 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); 17500 17501 return sizeInfo; 17502 } 17503 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17504 17505 //=== VK_KHR_ray_tracing_pipeline === 17506 17507 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const17508 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 17509 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 17510 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 17511 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 17512 uint32_t width, 17513 uint32_t height, 17514 uint32_t depth, 17515 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17516 { 17517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17518 d.vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 17519 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 17520 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 17521 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 17522 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 17523 width, 17524 height, 17525 depth ); 17526 } 17527 17528 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17529 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const17530 VULKAN_HPP_INLINE void 
CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 17531 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, 17532 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, 17533 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, 17534 uint32_t width, 17535 uint32_t height, 17536 uint32_t depth, 17537 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17538 { 17539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17540 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17541 VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17542 # endif 17543 17544 d.vkCmdTraceRaysKHR( m_commandBuffer, 17545 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 17546 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 17547 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 17548 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 17549 width, 17550 height, 17551 depth ); 17552 } 17553 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17554 17555 template <typename Dispatch> 17556 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const17557 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17558 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17559 uint32_t createInfoCount, 17560 const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, 17561 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17562 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 17563 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17564 { 17565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17566 return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( m_device ), 17567 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17568 static_cast<VkPipelineCache>( pipelineCache ), 17569 createInfoCount, 17570 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), 17571 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17572 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 17573 } 17574 17575 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17576 template <typename PipelineAllocator, 17577 typename Dispatch, 17578 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 17579 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17580 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17581 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17582 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17583 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17584 Dispatch const & d ) const 17585 { 17586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17587 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17588 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17589 # endif 17590 17591 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 17592 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17593 m_device, 17594 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17595 static_cast<VkPipelineCache>( pipelineCache ), 17596 createInfos.size(), 17597 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17598 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17599 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17600 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17601 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17602 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17603 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17604 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17605 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17606 17607 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 17608 } 17609 17610 template <typename PipelineAllocator, 17611 typename Dispatch, 17612 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 17613 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17614 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17615 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17616 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17617 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17618 PipelineAllocator & pipelineAllocator, 17619 Dispatch const & d ) const 17620 { 17621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17622 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17623 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17624 # endif 17625 17626 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 17627 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17628 m_device, 17629 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17630 static_cast<VkPipelineCache>( pipelineCache ), 17631 createInfos.size(), 17632 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17633 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17634 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17635 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17636 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17637 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17638 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17639 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17640 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17641 17642 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, 
PipelineAllocator>>( result, std::move( pipelines ) ); 17643 } 17644 17645 template <typename Dispatch> 17646 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17647 Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17648 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17649 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 17650 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17651 Dispatch const & d ) const 17652 { 17653 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17654 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17655 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17656 # endif 17657 17658 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 17659 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17660 m_device, 17661 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17662 static_cast<VkPipelineCache>( pipelineCache ), 17663 1, 17664 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 17665 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17666 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 17667 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17668 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", 17669 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17670 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17671 
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17672 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17673 17674 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 17675 } 17676 17677 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17678 template < 17679 typename Dispatch, 17680 typename PipelineAllocator, 17681 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 17682 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17683 Device::createRayTracingPipelinesKHRUnique( 17684 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17685 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17686 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17687 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17688 Dispatch const & d ) const 17689 { 17690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17691 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17692 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17693 # endif 17694 17695 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 17696 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17697 m_device, 17698 static_cast<VkDeferredOperationKHR>( deferredOperation 
), 17699 static_cast<VkPipelineCache>( pipelineCache ), 17700 createInfos.size(), 17701 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17702 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17703 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17704 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17705 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17706 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17707 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17708 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17709 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17710 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 17711 uniquePipelines.reserve( createInfos.size() ); 17712 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17713 for ( auto const & pipeline : pipelines ) 17714 { 17715 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 17716 } 17717 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 17718 } 17719 17720 template < 17721 typename Dispatch, 17722 typename PipelineAllocator, 17723 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 17724 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17725 Device::createRayTracingPipelinesKHRUnique( 17726 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17727 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17728 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17729 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17730 PipelineAllocator & pipelineAllocator, 17731 Dispatch const & d ) const 17732 { 17733 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17734 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17735 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 17736 # endif 17737 17738 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 17739 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 17740 m_device, 17741 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17742 static_cast<VkPipelineCache>( pipelineCache ), 17743 createInfos.size(), 17744 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17745 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17746 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17747 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 17748 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17749 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17750 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17751 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17752 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17753 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> 
uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    // All pipelines share one deleter; ObjectDestroy captures device, allocator and dispatcher.
    detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Enhanced single-pipeline variant: creates exactly one ray-tracing pipeline (createInfoCount == 1)
  // and wraps it in a UniqueHandle. vkCreateRayTracingPipelinesKHR may return non-error codes besides
  // eSuccess (deferred / not-deferred / pipeline-compile-required), so the Result is returned together
  // with the handle instead of being folded into an exception.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                               VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                               const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                               Dispatch const &                                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
#  endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                                 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                                 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                                 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes dataSize bytes of shader group handles into caller-provided pData.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                            uint32_t                       firstGroup,
                                                                                            uint32_t                       groupCount,
                                                                                            size_t                         dataSize,
                                                                                            void *                         pData,
                                                                                            Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the handles as a vector of DataType; dataSize must be a multiple of
  // sizeof( DataType ) (asserted below).
  template <typename DataType,
            typename DataTypeAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR &&
                       "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
#  endif

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Single-value convenience: fetches exactly sizeof( DataType ) bytes into one DataType.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR &&
                       "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
#  endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload of the capture-replay handle query (for pipeline capture/replay tooling).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                                         uint32_t                       firstGroup,
                                                                                                         uint32_t                       groupCount,
                                                                                                         size_t                         dataSize,
                                                                                                         void *                         pData,
                                                                                                         Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: capture-replay handles as a vector of DataType.
  template <typename DataType,
            typename DataTypeAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
                       "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
#  endif

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Single-value convenience for the capture-replay handle query.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
                       "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
#  endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: records an indirect ray-trace; binding-table regions passed by pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: binding-table regions passed by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" );
#  endif

    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Queries the stack size of one shader within a ray-tracing pipeline shader group.
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline             pipeline,
                                                                             uint32_t                                   group,
                                                                             VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
                                                                             Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
      static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  }

  // Records the dynamic stack size for the currently bound ray-tracing pipeline.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
  }

  //=== VK_KHR_sampler_ycbcr_conversion ===

  // C-style overload: creates a sampler Y'CbCr conversion into caller-provided pYcbcrConversion.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                             VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
                                             Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ),
                                                                     reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                     reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created conversion (throws / returns per exception config).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                             Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR &&
                       "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the returned handle destroys the conversion on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                                   Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR &&
                       "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result,
      UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: destroys a sampler Y'CbCr conversion (pAllocator may be nullptr).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ),
                                          static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional allocator instead of raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion              ycbcrConversion,
                                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR &&
                       "Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
#  endif

    d.vkDestroySamplerYcbcrConversionKHR(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_bind_memory2 ===

  // C-style overload: binds memory to multiple buffers in one call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t                                           bindInfoCount,
                                                                              const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                                                              Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy supplies both count and data.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                                  Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: binds memory to multiple images in one call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t                                          bindInfoCount,
                                                                             const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                                                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy supplies both count and data.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_image_drm_format_modifier ===

  // C-style overload: queries the DRM format modifier properties of an image.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
    VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties structure by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
    Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT &&
                       "Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" );
#  endif

    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
    VULKAN_HPP_NAMESPACE::Result                              result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_validation_cache ===

  // C-style overload: creates a validation cache into caller-provided pValidationCache.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
                                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                                                  VULKAN_HPP_NAMESPACE::ValidationCacheEXT *                 pValidationCache,
                                                                                  Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateValidationCacheEXT( static_cast<VkDevice>( m_device ),
                                                              reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created validation cache.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
    Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                      Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" );
#  endif

    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT(
      m_device,
      reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( validationCache ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant of createValidationCacheEXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
    Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                            Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" );
#  endif

    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT(
      m_device,
      reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result,
      UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: destroys a validation cache.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyValidationCacheEXT(
      static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional allocator instead of raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT                  validationCache,
                                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                            Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" );
#  endif

    d.vkDestroyValidationCacheEXT(
      m_device,
      static_cast<VkValidationCacheEXT>( validationCache ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload dispatching to vkDestroyValidationCacheEXT (used by UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyValidationCacheEXT(
      static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced generic destroy() overload with Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT                  validationCache,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" );
#  endif

    d.vkDestroyValidationCacheEXT(
      m_device,
      static_cast<VkValidationCacheEXT>( validationCache ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: merges srcCacheCount validation caches into dstCache.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT         dstCache,
                                                                                  uint32_t                                         srcCacheCount,
                                                                                  const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
                                                                                  Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
                                                              static_cast<VkValidationCacheEXT>( dstCache ),
                                                              srcCacheCount,
                                                              reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy supplies both count and data.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT                                                 dstCache,
                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
                                      Dispatch const &                                                                         d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergeValidationCachesEXT(
      m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: two-call pattern — query size via pData == nullptr, then fetch into pData.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                                                                   size_t *                                 pDataSize,
                                                                                   void *                                   pData,
                                                                                   Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: loops on eIncomplete (cache may grow between the size query and the fetch)
  // and returns the cache contents as a byte vector.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" );
#  endif

    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    if ( dataSize < data.size() )
    {
      data.resize(
dataSize ); 18351 } 18352 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18353 } 18354 18355 template <typename Uint8_tAllocator, 18356 typename Dispatch, 18357 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 18358 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const18359 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 18360 { 18361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18362 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18363 VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); 18364 # endif 18365 18366 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 18367 size_t dataSize; 18368 VULKAN_HPP_NAMESPACE::Result result; 18369 do 18370 { 18371 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18372 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 18373 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 18374 { 18375 data.resize( dataSize ); 18376 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18377 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 18378 } 18379 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18380 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 18381 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 18382 if ( dataSize < data.size() ) 18383 { 18384 
data.resize( dataSize ); 18385 } 18386 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 18387 } 18388 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18389 18390 //=== VK_NV_shading_rate_image === 18391 18392 template <typename Dispatch> bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const18393 VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, 18394 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 18395 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18396 { 18397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18398 d.vkCmdBindShadingRateImageNV( 18399 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 18400 } 18401 18402 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,Dispatch const & d) const18403 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, 18404 uint32_t viewportCount, 18405 const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, 18406 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18407 { 18408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18409 d.vkCmdSetViewportShadingRatePaletteNV( 18410 static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); 18411 } 18412 18413 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18414 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,Dispatch const & d) const18415 VULKAN_HPP_INLINE void 
CommandBuffer::setViewportShadingRatePaletteNV( 18416 uint32_t firstViewport, 18417 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, 18418 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18419 { 18420 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18421 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18422 VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" ); 18423 # endif 18424 18425 d.vkCmdSetViewportShadingRatePaletteNV( 18426 m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) ); 18427 } 18428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18429 18430 template <typename Dispatch> setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,uint32_t customSampleOrderCount,const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,Dispatch const & d) const18431 VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 18432 uint32_t customSampleOrderCount, 18433 const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, 18434 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18435 { 18436 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18437 d.vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 18438 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 18439 customSampleOrderCount, 18440 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); 18441 } 18442 18443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18444 template <typename Dispatch> 18445 VULKAN_HPP_INLINE void setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,Dispatch const & d) const18446 CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 18447 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, 18448 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18449 { 18450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18451 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18452 VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" ); 18453 # endif 18454 18455 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 18456 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 18457 customSampleOrders.size(), 18458 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) ); 18459 } 18460 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18461 18462 //=== VK_NV_ray_tracing === 18463 18464 template <typename Dispatch> 18465 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,Dispatch const & d) const18466 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, 18467 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18468 VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, 18469 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18470 { 18471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18472 return static_cast<Result>( d.vkCreateAccelerationStructureNV( static_cast<VkDevice>( m_device ), 18473 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), 18474 reinterpret_cast<const 
VkAllocationCallbacks *>( pAllocator ), 18475 reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) ); 18476 } 18477 18478 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18479 template <typename Dispatch> 18480 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18481 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 18482 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18483 Dispatch const & d ) const 18484 { 18485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18486 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18487 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18488 # endif 18489 18490 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 18491 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 18492 m_device, 18493 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 18494 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18495 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 18496 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); 18497 18498 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); 18499 } 18500 18501 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18502 template <typename Dispatch> 18503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18504 Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 18505 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18506 Dispatch const & d ) const 18507 { 18508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18509 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18510 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18511 # endif 18512 18513 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 18514 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 18515 m_device, 18516 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 18517 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18518 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 18519 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); 18520 18521 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 18522 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( 18523 accelerationStructure, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 18524 } 18525 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18526 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18527 18528 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const18529 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 18530 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18531 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18532 { 18533 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18534 d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), 18535 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 18536 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18537 } 18538 18539 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18540 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18541 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 18542 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18543 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18544 { 18545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18546 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18547 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18548 # endif 18549 18550 d.vkDestroyAccelerationStructureNV( 18551 m_device, 18552 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 18553 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18554 } 18555 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18556 18557 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch 
const & d) const18558 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 18559 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18560 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18561 { 18562 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18563 d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), 18564 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 18565 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18566 } 18567 18568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18569 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18570 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 18571 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18572 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18573 { 18574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18575 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18576 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18577 # endif 18578 18579 d.vkDestroyAccelerationStructureNV( 18580 m_device, 18581 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 18582 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18583 } 18584 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18585 18586 template <typename Dispatch> 18587 VULKAN_HPP_INLINE void getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,Dispatch const & d) const18588 
Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, 18589 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, 18590 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18591 { 18592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18593 d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ), 18594 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), 18595 reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) ); 18596 } 18597 18598 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18599 template <typename Dispatch> 18600 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const18601 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 18602 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18603 { 18604 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18605 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18606 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 18607 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 18608 # endif 18609 18610 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; 18611 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 18612 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 18613 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 18614 18615 return memoryRequirements; 18616 } 18617 18618 template <typename X, typename Y, typename... 
Z, typename Dispatch> 18619 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const18620 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 18621 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18622 { 18623 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18624 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18625 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 18626 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 18627 # endif 18628 18629 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18630 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>(); 18631 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 18632 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 18633 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 18634 18635 return structureChain; 18636 } 18637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18638 18639 template <typename Dispatch> bindAccelerationStructureMemoryNV(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,Dispatch const & d) const18640 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( 18641 uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18642 { 18643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18644 return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( 18645 static_cast<VkDevice>( m_device ), 
bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); 18646 } 18647 18648 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18649 template <typename Dispatch> bindAccelerationStructureMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,Dispatch const & d) const18650 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( 18651 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const 18652 { 18653 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18654 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18655 VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" ); 18656 # endif 18657 18658 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindAccelerationStructureMemoryNV( 18659 m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) ); 18660 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); 18661 18662 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18663 } 18664 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18665 18666 template <typename Dispatch> buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,VULKAN_HPP_NAMESPACE::Buffer instanceData,VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,VULKAN_HPP_NAMESPACE::Bool32 update,VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::Buffer scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,Dispatch const & d) const18667 VULKAN_HPP_INLINE void 
CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, 18668 VULKAN_HPP_NAMESPACE::Buffer instanceData, 18669 VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, 18670 VULKAN_HPP_NAMESPACE::Bool32 update, 18671 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18672 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18673 VULKAN_HPP_NAMESPACE::Buffer scratch, 18674 VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, 18675 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18676 { 18677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18678 d.vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 18679 reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), 18680 static_cast<VkBuffer>( instanceData ), 18681 static_cast<VkDeviceSize>( instanceOffset ), 18682 static_cast<VkBool32>( update ), 18683 static_cast<VkAccelerationStructureNV>( dst ), 18684 static_cast<VkAccelerationStructureNV>( src ), 18685 static_cast<VkBuffer>( scratch ), 18686 static_cast<VkDeviceSize>( scratchOffset ) ); 18687 } 18688 18689 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18690 template <typename Dispatch> buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,VULKAN_HPP_NAMESPACE::Buffer instanceData,VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,VULKAN_HPP_NAMESPACE::Bool32 update,VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::Buffer scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,Dispatch const & d) const18691 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, 18692 VULKAN_HPP_NAMESPACE::Buffer instanceData, 18693 VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, 18694 VULKAN_HPP_NAMESPACE::Bool32 update, 18695 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18696 
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18697 VULKAN_HPP_NAMESPACE::Buffer scratch, 18698 VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, 18699 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18700 { 18701 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18702 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18703 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 18704 # endif 18705 18706 d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, 18707 reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), 18708 static_cast<VkBuffer>( instanceData ), 18709 static_cast<VkDeviceSize>( instanceOffset ), 18710 static_cast<VkBool32>( update ), 18711 static_cast<VkAccelerationStructureNV>( dst ), 18712 static_cast<VkAccelerationStructureNV>( src ), 18713 static_cast<VkBuffer>( scratch ), 18714 static_cast<VkDeviceSize>( scratchOffset ) ); 18715 } 18716 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18717 18718 template <typename Dispatch> copyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,Dispatch const & d) const18719 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, 18720 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, 18721 VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, 18722 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18723 { 18724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18725 d.vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 18726 static_cast<VkAccelerationStructureNV>( dst ), 18727 static_cast<VkAccelerationStructureNV>( src ), 18728 static_cast<VkCopyAccelerationStructureModeKHR>( mode ) ); 18729 } 18730 18731 template <typename Dispatch> 
traceRaysNV(VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const18732 VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, 18733 VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, 18734 VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, 18735 VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, 18736 VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, 18737 VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, 18738 VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, 18739 VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, 18740 VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, 18741 VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, 18742 VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, 18743 uint32_t width, 18744 uint32_t height, 18745 uint32_t depth, 18746 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18747 { 18748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18749 d.vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 18750 static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), 18751 static_cast<VkDeviceSize>( raygenShaderBindingOffset ), 18752 static_cast<VkBuffer>( missShaderBindingTableBuffer ), 18753 static_cast<VkDeviceSize>( 
missShaderBindingOffset ), 18754 static_cast<VkDeviceSize>( missShaderBindingStride ), 18755 static_cast<VkBuffer>( hitShaderBindingTableBuffer ), 18756 static_cast<VkDeviceSize>( hitShaderBindingOffset ), 18757 static_cast<VkDeviceSize>( hitShaderBindingStride ), 18758 static_cast<VkBuffer>( callableShaderBindingTableBuffer ), 18759 static_cast<VkDeviceSize>( callableShaderBindingOffset ), 18760 static_cast<VkDeviceSize>( callableShaderBindingStride ), 18761 width, 18762 height, 18763 depth ); 18764 } 18765 18766 template <typename Dispatch> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const18767 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18768 uint32_t createInfoCount, 18769 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, 18770 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18771 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 18772 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18773 { 18774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18775 return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( m_device ), 18776 static_cast<VkPipelineCache>( pipelineCache ), 18777 createInfoCount, 18778 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), 18779 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18780 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 18781 } 18782 18783 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18784 template <typename PipelineAllocator, 18785 typename Dispatch, 18786 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, 
int>::type> 18787 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18788 Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18789 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18790 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18791 Dispatch const & d ) const 18792 { 18793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18795 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18796 # endif 18797 18798 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 18799 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18800 m_device, 18801 static_cast<VkPipelineCache>( pipelineCache ), 18802 createInfos.size(), 18803 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18804 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18805 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18806 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18807 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", 18808 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18809 18810 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) 
); 18811 } 18812 18813 template <typename PipelineAllocator, 18814 typename Dispatch, 18815 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 18816 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const18817 Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18818 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18819 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18820 PipelineAllocator & pipelineAllocator, 18821 Dispatch const & d ) const 18822 { 18823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18824 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18825 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18826 # endif 18827 18828 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 18829 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18830 m_device, 18831 static_cast<VkPipelineCache>( pipelineCache ), 18832 createInfos.size(), 18833 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18834 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18835 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18836 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
18837 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", 18838 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18839 18840 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 18841 } 18842 18843 template <typename Dispatch> 18844 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18845 Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18846 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, 18847 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18848 Dispatch const & d ) const 18849 { 18850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18851 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18852 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18853 # endif 18854 18855 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 18856 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18857 m_device, 18858 static_cast<VkPipelineCache>( pipelineCache ), 18859 1, 18860 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), 18861 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18862 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 18863 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18864 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", 18865 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18866 18867 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 18868 } 18869 18870 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18871 template < 18872 typename Dispatch, 18873 typename PipelineAllocator, 18874 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 18875 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18876 Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18877 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18878 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18879 Dispatch const & d ) const 18880 { 18881 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18882 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18883 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18884 # endif 18885 18886 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 18887 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18888 m_device, 18889 static_cast<VkPipelineCache>( pipelineCache ), 18890 createInfos.size(), 18891 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18892 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18893 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18894 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18895 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", 18896 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18897 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 18898 uniquePipelines.reserve( createInfos.size() ); 18899 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 18900 for ( auto const & pipeline : pipelines ) 18901 { 18902 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 18903 } 18904 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 18905 } 18906 18907 template < 18908 typename Dispatch, 18909 typename PipelineAllocator, 18910 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 18911 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const18912 Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18913 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, 18914 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18915 PipelineAllocator & pipelineAllocator, 18916 Dispatch 
const & d ) const 18917 { 18918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18919 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18920 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18921 # endif 18922 18923 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 18924 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18925 m_device, 18926 static_cast<VkPipelineCache>( pipelineCache ), 18927 createInfos.size(), 18928 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), 18929 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18930 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 18931 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18932 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", 18933 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18934 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 18935 uniquePipelines.reserve( createInfos.size() ); 18936 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 18937 for ( auto const & pipeline : pipelines ) 18938 { 18939 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 18940 } 18941 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 18942 } 18943 18944 template <typename Dispatch> 18945 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18946 Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 18947 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, 18948 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18949 Dispatch const & d ) const 18950 { 18951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18952 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18953 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); 18954 # endif 18955 18956 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 18957 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV( 18958 m_device, 18959 static_cast<VkPipelineCache>( pipelineCache ), 18960 1, 18961 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), 18962 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18963 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 18964 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 18965 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", 18966 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 18967 18968 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 18969 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 18970 } 18971 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18972 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18973 18974 template <typename Dispatch> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t 
groupCount,size_t dataSize,void * pData,Dispatch const & d) const18975 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 18976 uint32_t firstGroup, 18977 uint32_t groupCount, 18978 size_t dataSize, 18979 void * pData, 18980 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18981 { 18982 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18983 return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( 18984 static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 18985 } 18986 18987 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18988 template <typename DataType, 18989 typename DataTypeAllocator, 18990 typename Dispatch, 18991 typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const18992 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV( 18993 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 18994 { 18995 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18996 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18997 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && 18998 "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 18999 # endif 19000 19001 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 19002 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 19003 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 19004 m_device, 
static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 19005 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); 19006 19007 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 19008 } 19009 19010 template <typename DataType, typename Dispatch> 19011 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const19012 Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 19013 { 19014 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19015 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19016 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && 19017 "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 19018 # endif 19019 19020 DataType data; 19021 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 19022 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 19023 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); 19024 19025 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 19026 } 19027 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19028 19029 template <typename Dispatch> getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,void * pData,Dispatch const & d) const19030 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 19031 size_t dataSize, 19032 void * pData, 19033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19034 { 19035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19036 return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( 19037 static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); 19038 } 19039 19040 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19041 template <typename DataType, 19042 typename DataTypeAllocator, 19043 typename Dispatch, 19044 typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type> 19045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,Dispatch const & d) const19046 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const 19047 { 19048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19050 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); 19051 # endif 19052 19053 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 19054 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 19055 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 19056 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 19057 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getAccelerationStructureHandleNV" ); 19058 19059 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 19060 } 19061 19062 template <typename DataType, typename Dispatch> 19063 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Dispatch const & d) const19064 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const 19065 { 19066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19068 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); 19069 # endif 19070 19071 DataType data; 19072 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 19073 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 19074 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); 19075 19076 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 19077 } 19078 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19079 19080 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const19081 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, 19082 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, 19083 
VULKAN_HPP_NAMESPACE::QueryType queryType, 19084 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 19085 uint32_t firstQuery, 19086 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19087 { 19088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19089 d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 19090 accelerationStructureCount, 19091 reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), 19092 static_cast<VkQueryType>( queryType ), 19093 static_cast<VkQueryPool>( queryPool ), 19094 firstQuery ); 19095 } 19096 19097 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19098 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const19099 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( 19100 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, 19101 VULKAN_HPP_NAMESPACE::QueryType queryType, 19102 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 19103 uint32_t firstQuery, 19104 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19105 { 19106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19107 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19108 VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV && 19109 "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" ); 19110 # endif 19111 19112 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, 19113 accelerationStructures.size(), 19114 reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), 19115 static_cast<VkQueryType>( queryType ), 19116 static_cast<VkQueryPool>( queryPool ), 19117 firstQuery ); 19118 } 
19119 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19120 19121 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19122 template <typename Dispatch> compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const19123 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 19124 uint32_t shader, 19125 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19126 { 19127 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19128 return static_cast<Result>( d.vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), shader ) ); 19129 } 19130 #else 19131 template <typename Dispatch> 19132 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const19133 Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const 19134 { 19135 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19136 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19137 VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" ); 19138 # endif 19139 19140 VULKAN_HPP_NAMESPACE::Result result = 19141 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); 19142 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); 19143 19144 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19145 } 19146 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19147 19148 //=== VK_KHR_maintenance3 === 19149 19150 template <typename Dispatch> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) 
const19151 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 19152 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 19153 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19154 { 19155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19156 d.vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ), 19157 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 19158 reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 19159 } 19160 19161 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19162 template <typename Dispatch> 19163 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const19164 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 19165 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19166 { 19167 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19168 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19169 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 19170 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 19171 # endif 19172 19173 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 19174 d.vkGetDescriptorSetLayoutSupportKHR( 19175 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 19176 19177 return support; 19178 } 19179 19180 template <typename X, typename Y, typename... 
Z, typename Dispatch> 19181 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const19182 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 19183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19184 { 19185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19186 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19187 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 19188 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 19189 # endif 19190 19191 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 19192 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 19193 d.vkGetDescriptorSetLayoutSupportKHR( 19194 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 19195 19196 return structureChain; 19197 } 19198 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19199 19200 //=== VK_KHR_draw_indirect_count === 19201 19202 template <typename Dispatch> drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const19203 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 19204 VULKAN_HPP_NAMESPACE::DeviceSize offset, 19205 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 19206 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 19207 uint32_t maxDrawCount, 19208 uint32_t stride, 19209 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19210 { 19211 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19212 d.vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 19213 static_cast<VkBuffer>( buffer ), 19214 static_cast<VkDeviceSize>( offset ), 19215 static_cast<VkBuffer>( countBuffer ), 19216 static_cast<VkDeviceSize>( countBufferOffset ), 19217 maxDrawCount, 19218 stride ); 19219 } 19220 19221 template <typename Dispatch> drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const19222 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 19223 VULKAN_HPP_NAMESPACE::DeviceSize offset, 19224 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 19225 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 19226 uint32_t maxDrawCount, 19227 uint32_t stride, 19228 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19229 { 19230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19231 d.vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 19232 static_cast<VkBuffer>( buffer ), 19233 static_cast<VkDeviceSize>( offset ), 19234 static_cast<VkBuffer>( countBuffer ), 19235 static_cast<VkDeviceSize>( countBufferOffset ), 19236 maxDrawCount, 19237 stride ); 19238 } 19239 19240 //=== VK_EXT_external_memory_host === 19241 19242 template <typename Dispatch> 19243 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,Dispatch const & d) const19244 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 19245 const void * pHostPointer, 19246 
VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, 19247 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19248 { 19249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19250 return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ), 19251 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 19252 pHostPointer, 19253 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); 19254 } 19255 19256 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19257 template <typename Dispatch> 19258 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,Dispatch const & d) const19259 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 19260 const void * pHostPointer, 19261 Dispatch const & d ) const 19262 { 19263 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19264 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19265 VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function <vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" ); 19266 # endif 19267 19268 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; 19269 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19270 d.vkGetMemoryHostPointerPropertiesEXT( m_device, 19271 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 19272 pHostPointer, 19273 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) ); 19274 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); 19275 19276 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); 19277 } 19278 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19279 19280 //=== VK_AMD_buffer_marker === 19281 19282 template <typename Dispatch> writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const19283 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 19284 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 19285 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 19286 uint32_t marker, 19287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19288 { 19289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19290 d.vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), 19291 static_cast<VkPipelineStageFlagBits>( pipelineStage ), 19292 static_cast<VkBuffer>( dstBuffer ), 19293 static_cast<VkDeviceSize>( dstOffset ), 19294 marker ); 19295 } 19296 19297 template <typename Dispatch> writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const19298 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 19299 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 19300 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 19301 uint32_t marker, 19302 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19303 { 19304 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19305 d.vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ), 19306 static_cast<VkPipelineStageFlags2>( stage ), 19307 static_cast<VkBuffer>( dstBuffer ), 19308 static_cast<VkDeviceSize>( dstOffset ), 19309 marker ); 19310 } 19311 19312 //=== VK_EXT_calibrated_timestamps === 19313 19314 
template <typename Dispatch> getCalibrateableTimeDomainsEXT(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const19315 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, 19316 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 19317 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19318 { 19319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19320 return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( 19321 static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 19322 } 19323 19324 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19325 template <typename TimeDomainKHRAllocator, 19326 typename Dispatch, 19327 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 19328 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const & d) const19329 PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const 19330 { 19331 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19332 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19333 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 19334 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 19335 # endif 19336 19337 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 19338 uint32_t timeDomainCount; 19339 VULKAN_HPP_NAMESPACE::Result result; 19340 do 19341 { 19342 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 19343 if ( ( 
result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 19344 { 19345 timeDomains.resize( timeDomainCount ); 19346 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19347 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 19348 } 19349 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19350 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 19351 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 19352 if ( timeDomainCount < timeDomains.size() ) 19353 { 19354 timeDomains.resize( timeDomainCount ); 19355 } 19356 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 19357 } 19358 19359 template <typename TimeDomainKHRAllocator, 19360 typename Dispatch, 19361 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 19362 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const19363 PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 19364 { 19365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19367 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 19368 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 19369 # endif 19370 19371 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 19372 uint32_t timeDomainCount; 19373 
VULKAN_HPP_NAMESPACE::Result result; 19374 do 19375 { 19376 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 19377 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 19378 { 19379 timeDomains.resize( timeDomainCount ); 19380 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19381 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 19382 } 19383 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19384 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 19385 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 19386 if ( timeDomainCount < timeDomains.size() ) 19387 { 19388 timeDomains.resize( timeDomainCount ); 19389 } 19390 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 19391 } 19392 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19393 19394 template <typename Dispatch> getCalibratedTimestampsEXT(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const19395 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, 19396 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 19397 uint64_t * pTimestamps, 19398 uint64_t * pMaxDeviation, 19399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19400 { 19401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19402 return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ), 19403 timestampCount, 19404 reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), 19405 pTimestamps, 19406 pMaxDeviation ) ); 19407 
} 19408 19409 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19410 template <typename Uint64_tAllocator, 19411 typename Dispatch, 19412 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 19413 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const19414 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 19415 Dispatch const & d ) const 19416 { 19417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19418 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19419 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 19420 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 19421 # endif 19422 19423 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 19424 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 19425 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 19426 uint64_t & maxDeviation = data_.second; 19427 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 19428 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 19429 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 19430 19431 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 19432 } 19433 19434 template <typename Uint64_tAllocator, 19435 typename Dispatch, 19436 typename 
std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 19437 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const19438 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 19439 Uint64_tAllocator & uint64_tAllocator, 19440 Dispatch const & d ) const 19441 { 19442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19443 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19444 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 19445 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 19446 # endif 19447 19448 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 19449 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 19450 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 19451 uint64_t & maxDeviation = data_.second; 19452 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 19453 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 19454 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 19455 19456 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 19457 } 19458 19459 template <typename Dispatch> 19460 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type 
getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const19461 Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 19462 { 19463 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19464 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19465 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 19466 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 19467 # endif 19468 19469 std::pair<uint64_t, uint64_t> data_; 19470 uint64_t & timestamp = data_.first; 19471 uint64_t & maxDeviation = data_.second; 19472 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19473 d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 19474 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); 19475 19476 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 19477 } 19478 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19479 19480 //=== VK_NV_mesh_shader === 19481 19482 template <typename Dispatch> drawMeshTasksNV(uint32_t taskCount,uint32_t firstTask,Dispatch const & d) const19483 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19484 { 19485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19486 d.vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask ); 19487 } 19488 19489 template <typename Dispatch> drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const19490 VULKAN_HPP_INLINE void 
CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 19491 VULKAN_HPP_NAMESPACE::DeviceSize offset, 19492 uint32_t drawCount, 19493 uint32_t stride, 19494 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19495 { 19496 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19497 d.vkCmdDrawMeshTasksIndirectNV( 19498 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 19499 } 19500 19501 template <typename Dispatch> drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const19502 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 19503 VULKAN_HPP_NAMESPACE::DeviceSize offset, 19504 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 19505 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 19506 uint32_t maxDrawCount, 19507 uint32_t stride, 19508 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19509 { 19510 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19511 d.vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 19512 static_cast<VkBuffer>( buffer ), 19513 static_cast<VkDeviceSize>( offset ), 19514 static_cast<VkBuffer>( countBuffer ), 19515 static_cast<VkDeviceSize>( countBufferOffset ), 19516 maxDrawCount, 19517 stride ); 19518 } 19519 19520 //=== VK_NV_scissor_exclusive === 19521 19522 template <typename Dispatch> setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,Dispatch const & d) const19523 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 19524 uint32_t exclusiveScissorCount, 19525 const 
VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, 19526 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19527 { 19528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19529 d.vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 19530 firstExclusiveScissor, 19531 exclusiveScissorCount, 19532 reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); 19533 } 19534 19535 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19536 template <typename Dispatch> 19537 VULKAN_HPP_INLINE void setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,Dispatch const & d) const19538 CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 19539 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables, 19540 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19541 { 19542 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19543 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19544 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" ); 19545 # endif 19546 19547 d.vkCmdSetExclusiveScissorEnableNV( 19548 m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) ); 19549 } 19550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19551 19552 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,Dispatch const & d) const19553 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 19554 uint32_t exclusiveScissorCount, 19555 const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, 19556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19557 { 19558 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19559 d.vkCmdSetExclusiveScissorNV( 19560 static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); 19561 } 19562 19563 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19564 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,Dispatch const & d) const19565 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 19566 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, 19567 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19568 { 19569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19571 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" ); 19572 # endif 19573 19574 d.vkCmdSetExclusiveScissorNV( 19575 m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) ); 19576 } 19577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19578 19579 //=== VK_NV_device_diagnostic_checkpoints === 19580 19581 template <typename Dispatch> setCheckpointNV(const void * pCheckpointMarker,Dispatch const & d) const19582 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19583 { 19584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19585 d.vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), pCheckpointMarker ); 19586 } 19587 19588 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19589 template <typename CheckpointMarkerType, typename Dispatch> setCheckpointNV(CheckpointMarkerType const & checkpointMarker,Dispatch 
const & d) const19590 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19591 { 19592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19593 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19594 VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 19595 # endif 19596 19597 d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) ); 19598 } 19599 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19600 19601 template <typename Dispatch> getCheckpointDataNV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,Dispatch const & d) const19602 VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, 19603 VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, 19604 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19605 { 19606 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19607 d.vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); 19608 } 19609 19610 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19611 template <typename CheckpointDataNVAllocator, 19612 typename Dispatch, 19613 typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type> 19614 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(Dispatch const & d) const19615 Queue::getCheckpointDataNV( Dispatch const & d ) const 19616 { 19617 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19618 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19619 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires 
<VK_NV_device_diagnostic_checkpoints>" ); 19620 # endif 19621 19622 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData; 19623 uint32_t checkpointDataCount; 19624 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 19625 checkpointData.resize( checkpointDataCount ); 19626 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 19627 19628 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 19629 if ( checkpointDataCount < checkpointData.size() ) 19630 { 19631 checkpointData.resize( checkpointDataCount ); 19632 } 19633 return checkpointData; 19634 } 19635 19636 template <typename CheckpointDataNVAllocator, 19637 typename Dispatch, 19638 typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type> 19639 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(CheckpointDataNVAllocator & checkpointDataNVAllocator,Dispatch const & d) const19640 Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const 19641 { 19642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19644 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 19645 # endif 19646 19647 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator ); 19648 uint32_t checkpointDataCount; 19649 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 19650 checkpointData.resize( checkpointDataCount ); 19651 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( 
checkpointData.data() ) ); 19652 19653 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 19654 if ( checkpointDataCount < checkpointData.size() ) 19655 { 19656 checkpointData.resize( checkpointDataCount ); 19657 } 19658 return checkpointData; 19659 } 19660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19661 19662 template <typename Dispatch> getCheckpointData2NV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,Dispatch const & d) const19663 VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, 19664 VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, 19665 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19666 { 19667 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19668 d.vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); 19669 } 19670 19671 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19672 template <typename CheckpointData2NVAllocator, 19673 typename Dispatch, 19674 typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type> 19675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(Dispatch const & d) const19676 Queue::getCheckpointData2NV( Dispatch const & d ) const 19677 { 19678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19680 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" ); 19681 # endif 19682 19683 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData; 19684 uint32_t checkpointDataCount; 19685 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 19686 
checkpointData.resize( checkpointDataCount ); 19687 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 19688 19689 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 19690 if ( checkpointDataCount < checkpointData.size() ) 19691 { 19692 checkpointData.resize( checkpointDataCount ); 19693 } 19694 return checkpointData; 19695 } 19696 19697 template <typename CheckpointData2NVAllocator, 19698 typename Dispatch, 19699 typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type> 19700 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(CheckpointData2NVAllocator & checkpointData2NVAllocator,Dispatch const & d) const19701 Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const 19702 { 19703 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19704 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19705 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" ); 19706 # endif 19707 19708 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator ); 19709 uint32_t checkpointDataCount; 19710 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 19711 checkpointData.resize( checkpointDataCount ); 19712 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 19713 19714 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 19715 if ( checkpointDataCount < checkpointData.size() ) 19716 { 19717 checkpointData.resize( checkpointDataCount ); 19718 } 19719 return checkpointData; 19720 } 19721 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19722 19723 //=== VK_KHR_timeline_semaphore === 19724 19725 template <typename Dispatch> getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const19726 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 19727 uint64_t * pValue, 19728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19729 { 19730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19731 return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) ); 19732 } 19733 19734 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19735 template <typename Dispatch> 19736 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const19737 Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const 19738 { 19739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19740 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19741 VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19742 # endif 19743 19744 uint64_t value; 19745 VULKAN_HPP_NAMESPACE::Result result = 19746 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 19747 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); 19748 19749 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 19750 } 19751 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19752 19753 template <typename Dispatch> waitSemaphoresKHR(const 
VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const19754 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 19755 uint64_t timeout, 19756 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19757 { 19758 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19759 return static_cast<Result>( 19760 d.vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 19761 } 19762 19763 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19764 template <typename Dispatch> 19765 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const19766 Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 19767 { 19768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19769 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19770 VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19771 # endif 19772 19773 VULKAN_HPP_NAMESPACE::Result result = 19774 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 19775 VULKAN_HPP_NAMESPACE::detail::resultCheck( 19776 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 19777 19778 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 19779 } 19780 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19781 19782 template <typename Dispatch> signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const19783 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 19784 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19785 { 19786 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19787 return static_cast<Result>( d.vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 19788 } 19789 19790 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19791 template <typename Dispatch> 19792 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const19793 Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 19794 { 19795 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19796 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19797 VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 19798 # endif 19799 19800 VULKAN_HPP_NAMESPACE::Result result = 19801 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 19802 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); 19803 19804 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19805 } 19806 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19807 19808 //=== VK_INTEL_performance_query === 19809 19810 template <typename Dispatch> initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,Dispatch const & d) const19811 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( 19812 const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19813 { 19814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19815 return static_cast<Result>( d.vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ), 19816 reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); 19817 } 19818 19819 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19820 template <typename Dispatch> 19821 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo,Dispatch const & d) const19822 Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const 19823 { 19824 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19825 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19826 VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" ); 19827 # endif 19828 19829 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19830 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) ); 19831 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); 19832 19833 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19834 } 19835 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19836 19837 template <typename Dispatch> uninitializePerformanceApiINTEL(Dispatch const & d) const19838 VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19839 { 19840 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19841 d.vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) ); 19842 } 19843 19844 
  // Records a performance marker into the command stream (raw-pointer overload).
  // Returns the unwrapped Result; no result checking or exception translation is done.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Debug-build guard against mixing a dispatcher generated for a different header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                  reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the info struct by reference and routes the result
  // through resultCheck (throws on error when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic loader the pointer is null unless VK_INTEL_performance_query was enabled;
    // assert here to give a readable diagnostic instead of a null call.
    VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a stream performance marker (raw-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                       reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload of setPerformanceStreamMarkerINTEL with result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Applies a performance override to the command buffer (raw-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                    reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload of setPerformanceOverrideINTEL with result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Acquires a performance configuration from the device (raw-pointer overload);
  // the handle is written through pConfiguration.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
                                                  VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
                                                                          reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
                                                                          reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the acquired handle by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
    Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkAcquirePerformanceConfigurationINTEL( m_device,
                                                reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
                                                reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( configuration ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps the acquired configuration so it is released
  // automatically via ObjectRelease when the UniqueHandle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
    Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
#   endif

    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkAcquirePerformanceConfigurationINTEL( m_device,
                                                reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
                                                reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result,
      UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, detail::ObjectRelease<Device, Dispatch>( *this, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Releases a previously acquired performance configuration (raw Result overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  // Enhanced-mode release: checks the result and returns through createResultValueType.
  // (The matching #endif for this branch follows immediately after this function.)
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic release() overload resolved by handle type; forwards to
  // vkReleasePerformanceConfigurationINTEL (raw Result variant).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  // Enhanced-mode generic release() for PerformanceConfigurationINTEL handles;
  // this is the deleter used by the Unique acquire variant above.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Binds a performance configuration to a queue (raw Result overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  // Enhanced-mode overload with result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL &&
                       "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Queries a performance parameter from the device (raw-pointer overload);
  // the value is written through pValue.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                                                                      VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
      static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queried value by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" );
#  endif

    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPerformanceParameterINTEL(
      m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_AMD_display_native_hdr ===

  // Toggles local dimming for a swapchain; void return, no result to check.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
                                                     VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  }

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_imagepipe_surface ===

  // Creates a Fuchsia ImagePipe surface (raw-pointer overload); the surface handle
  // is written through pSurface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( m_instance ),
                                                                   reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                   reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: optional allocator, returns the surface by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#   ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the surface is destroyed automatically (with the same
  // allocator) when the UniqueHandle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" );
#    endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#   endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif    /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_surface ===

  // Creates a Metal surface (raw-pointer overload); the surface handle is written
  // through pSurface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateMetalSurfaceEXT( static_cast<VkInstance>( m_instance ),
                                                           reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: optional allocator, returns the surface by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#   ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant of createMetalSurfaceEXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#    if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" );
#    endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#   endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif    /*VK_USE_PLATFORM_METAL_EXT*/

  //=== VK_KHR_fragment_shading_rate ===

  // Two-call enumeration entry point (raw-pointer overload): with a null
  // pFragmentShadingRates only the count is written back.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                    pFragmentShadingRateCount,
                                                    reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: returns the full list in a vector, looping while the
  // implementation reports eIncomplete (count may change between the two calls).
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type,
                                                 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR &&
                       "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
    uint32_t fragmentShadingRateCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // First call: query the count only.
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
      {
        // Second call: fetch the data; retry if the count grew in between.
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
    VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
    // Trim if the implementation returned fewer entries than were allocated.
    if ( fragmentShadingRateCount < fragmentShadingRates.size() )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) );
  }

  // Allocator-aware overload: identical enumeration loop, but the result vector is
  // constructed with the caller-supplied allocator.
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type,
                                                 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR &&
                       "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
      physicalDeviceFragmentShadingRateKHRAllocator );
    uint32_t fragmentShadingRateCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result =
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
    VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
    if ( fragmentShadingRateCount < fragmentShadingRates.size() )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Sets the per-draw fragment shading rate and the two combiner operations
  // (raw-pointer overload); void return.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
                                                                   const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                      reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: reference parameter plus dynamic-loader assert.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
                                                                   const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" );
#  endif

    d.vkCmdSetFragmentShadingRateKHR(
      m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_dynamic_rendering_local_read ===
template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo,Dispatch const & d) const20362 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, 20363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20364 { 20365 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20366 d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 20367 reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) ); 20368 } 20369 20370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20371 template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo,Dispatch const & d) const20372 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, 20373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20374 { 20375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20376 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20377 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && 20378 "Function <vkCmdSetRenderingAttachmentLocationsKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 20379 # endif 20380 20381 d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) ); 20382 } 20383 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20384 20385 template <typename Dispatch> 20386 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo,Dispatch const & d) const20387 CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * 
pInputAttachmentIndexInfo, 20388 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20389 { 20390 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20391 d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 20392 reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) ); 20393 } 20394 20395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20396 template <typename Dispatch> 20397 VULKAN_HPP_INLINE void setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo,Dispatch const & d) const20398 CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, 20399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20400 { 20401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20402 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20403 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && 20404 "Function <vkCmdSetRenderingInputAttachmentIndicesKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); 20405 # endif 20406 20407 d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) ); 20408 } 20409 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20410 20411 //=== VK_EXT_buffer_device_address === 20412 20413 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const20414 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 20415 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20416 { 20417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20418 return static_cast<DeviceAddress>( 20419 d.vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), 
reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 20420 } 20421 20422 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20423 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const20424 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 20425 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20426 { 20427 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20428 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20429 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && 20430 "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 20431 # endif 20432 20433 VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 20434 20435 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 20436 } 20437 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20438 20439 //=== VK_EXT_tooling_info === 20440 20441 template <typename Dispatch> getToolPropertiesEXT(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const20442 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, 20443 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 20444 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20445 { 20446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20447 return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( 20448 static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 20449 } 20450 20451 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20452 template < 20453 typename PhysicalDeviceToolPropertiesAllocator, 20454 
typename Dispatch, 20455 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 20456 int>::type> 20457 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20458 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(Dispatch const & d) const20459 PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const 20460 { 20461 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20462 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20463 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 20464 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 20465 # endif 20466 20467 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 20468 uint32_t toolCount; 20469 VULKAN_HPP_NAMESPACE::Result result; 20470 do 20471 { 20472 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 20473 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 20474 { 20475 toolProperties.resize( toolCount ); 20476 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20477 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 20478 } 20479 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20480 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 20481 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 20482 if ( toolCount < toolProperties.size() ) 20483 { 20484 toolProperties.resize( toolCount ); 20485 } 20486 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
toolProperties ) ); 20487 } 20488 20489 template < 20490 typename PhysicalDeviceToolPropertiesAllocator, 20491 typename Dispatch, 20492 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 20493 int>::type> 20494 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20495 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const20496 PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 20497 { 20498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20499 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20500 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 20501 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 20502 # endif 20503 20504 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 20505 physicalDeviceToolPropertiesAllocator ); 20506 uint32_t toolCount; 20507 VULKAN_HPP_NAMESPACE::Result result; 20508 do 20509 { 20510 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 20511 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 20512 { 20513 toolProperties.resize( toolCount ); 20514 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20515 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 20516 } 20517 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20518 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 20519 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 20520 if ( toolCount < toolProperties.size() ) 20521 { 20522 toolProperties.resize( toolCount ); 20523 } 20524 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 20525 } 20526 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20527 20528 //=== VK_KHR_present_wait === 20529 20530 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20531 template <typename Dispatch> waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const20532 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 20533 uint64_t presentId, 20534 uint64_t timeout, 20535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20536 { 20537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20538 return static_cast<Result>( d.vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 20539 } 20540 #else 20541 template <typename Dispatch> 20542 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const20543 Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const 20544 { 20545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20546 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20547 VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" ); 20548 # endif 20549 20550 VULKAN_HPP_NAMESPACE::Result result = 20551 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 20552 
VULKAN_HPP_NAMESPACE::detail::resultCheck( 20553 result, 20554 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", 20555 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 20556 20557 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 20558 } 20559 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20560 20561 //=== VK_NV_cooperative_matrix === 20562 20563 template <typename Dispatch> getCooperativeMatrixPropertiesNV(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,Dispatch const & d) const20564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( 20565 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20566 { 20567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20568 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 20569 static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); 20570 } 20571 20572 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20573 template <typename CooperativeMatrixPropertiesNVAllocator, 20574 typename Dispatch, 20575 typename std::enable_if< 20576 std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, 20577 int>::type> 20578 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20579 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const & d) const20580 PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const 20581 { 20582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20583 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20584 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 20585 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); 20586 # endif 20587 20588 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties; 20589 uint32_t propertyCount; 20590 VULKAN_HPP_NAMESPACE::Result result; 20591 do 20592 { 20593 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 20594 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 20595 { 20596 properties.resize( propertyCount ); 20597 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 20598 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 20599 } 20600 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20601 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 20602 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 20603 if ( propertyCount < properties.size() ) 20604 { 20605 properties.resize( propertyCount ); 20606 } 20607 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 20608 } 20609 20610 template <typename CooperativeMatrixPropertiesNVAllocator, 20611 typename Dispatch, 20612 typename std::enable_if< 20613 std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, 20614 int>::type> 20615 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20616 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(CooperativeMatrixPropertiesNVAllocator & 
cooperativeMatrixPropertiesNVAllocator,Dispatch const & d) const20617 PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, 20618 Dispatch const & d ) const 20619 { 20620 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20621 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20622 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 20623 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); 20624 # endif 20625 20626 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( 20627 cooperativeMatrixPropertiesNVAllocator ); 20628 uint32_t propertyCount; 20629 VULKAN_HPP_NAMESPACE::Result result; 20630 do 20631 { 20632 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 20633 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 20634 { 20635 properties.resize( propertyCount ); 20636 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 20637 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 20638 } 20639 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20640 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 20641 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 20642 if ( propertyCount < properties.size() ) 20643 { 20644 properties.resize( propertyCount ); 20645 } 20646 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 20647 } 20648 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20649 20650 //=== VK_NV_coverage_reduction_mode === 20651 20652 template <typename Dispatch> 
getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t * pCombinationCount,VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,Dispatch const & d) const20653 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 20654 uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20655 { 20656 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20657 return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 20658 static_cast<VkPhysicalDevice>( m_physicalDevice ), pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); 20659 } 20660 20661 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20662 template <typename FramebufferMixedSamplesCombinationNVAllocator, 20663 typename Dispatch, 20664 typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, 20665 VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, 20666 int>::type> 20667 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20668 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const & d) const20669 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const 20670 { 20671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20672 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20673 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 20674 "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 20675 # endif 20676 20677 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, 
FramebufferMixedSamplesCombinationNVAllocator> combinations; 20678 uint32_t combinationCount; 20679 VULKAN_HPP_NAMESPACE::Result result; 20680 do 20681 { 20682 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20683 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 20684 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 20685 { 20686 combinations.resize( combinationCount ); 20687 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 20688 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 20689 } 20690 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20691 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 20692 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 20693 if ( combinationCount < combinations.size() ) 20694 { 20695 combinations.resize( combinationCount ); 20696 } 20697 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 20698 } 20699 20700 template <typename FramebufferMixedSamplesCombinationNVAllocator, 20701 typename Dispatch, 20702 typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, 20703 VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, 20704 int>::type> 20705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 20706 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,Dispatch const & d) const20707 
PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 20708 FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const 20709 { 20710 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20711 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20712 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 20713 "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 20714 # endif 20715 20716 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( 20717 framebufferMixedSamplesCombinationNVAllocator ); 20718 uint32_t combinationCount; 20719 VULKAN_HPP_NAMESPACE::Result result; 20720 do 20721 { 20722 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20723 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 20724 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 20725 { 20726 combinations.resize( combinationCount ); 20727 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 20728 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 20729 } 20730 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20731 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 20732 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 20733 if ( combinationCount < combinations.size() ) 20734 { 20735 combinations.resize( combinationCount ); 20736 } 20737 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 20738 } 20739 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20740 20741 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 20742 //=== VK_EXT_full_screen_exclusive === 20743 20744 template <typename Dispatch> 20745 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const20746 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 20747 uint32_t * pPresentModeCount, 20748 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 20749 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20750 { 20751 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20752 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), 20753 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 20754 pPresentModeCount, 20755 reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 20756 } 20757 20758 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20759 template <typename PresentModeKHRAllocator, 20760 typename Dispatch, 20761 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 20762 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const20763 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 20764 { 20765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20767 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceSurfacePresentModes2EXT && 20768 "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 20769 # endif 20770 20771 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 20772 uint32_t presentModeCount; 20773 VULKAN_HPP_NAMESPACE::Result result; 20774 do 20775 { 20776 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 20777 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 20778 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 20779 { 20780 presentModes.resize( presentModeCount ); 20781 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20782 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 20783 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 20784 &presentModeCount, 20785 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 20786 } 20787 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20788 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 20789 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 20790 if ( presentModeCount < presentModes.size() ) 20791 { 20792 presentModes.resize( presentModeCount ); 20793 } 20794 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 20795 } 20796 20797 template <typename PresentModeKHRAllocator, 20798 typename Dispatch, 20799 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 20800 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const20801 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 20802 PresentModeKHRAllocator & presentModeKHRAllocator, 20803 Dispatch const & d ) const 20804 { 20805 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20806 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20807 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && 20808 "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 20809 # endif 20810 20811 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 20812 uint32_t presentModeCount; 20813 VULKAN_HPP_NAMESPACE::Result result; 20814 do 20815 { 20816 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 20817 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 20818 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 20819 { 20820 presentModes.resize( presentModeCount ); 20821 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20822 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 20823 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 20824 &presentModeCount, 20825 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 20826 } 20827 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 20828 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 20829 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 20830 if ( presentModeCount < presentModes.size() ) 20831 { 20832 presentModes.resize( presentModeCount ); 20833 } 20834 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 20835 } 20836 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20837 20838 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20839 template <typename Dispatch> acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const20840 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 20841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20842 { 20843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20844 return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); 20845 } 20846 # else 20847 template <typename Dispatch> 20848 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const20849 Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 20850 { 20851 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20852 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20853 VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); 20854 # endif 20855 20856 VULKAN_HPP_NAMESPACE::Result result = 20857 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 20858 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); 20859 20860 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20861 } 20862 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20863 20864 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20865 template 
<typename Dispatch> releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const20866 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 20867 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20868 { 20869 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20870 return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); 20871 } 20872 # else 20873 template <typename Dispatch> 20874 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const20875 Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 20876 { 20877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20879 VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); 20880 # endif 20881 20882 VULKAN_HPP_NAMESPACE::Result result = 20883 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 20884 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); 20885 20886 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20887 } 20888 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20889 20890 template <typename Dispatch> 20891 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) 
const20892 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 20893 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 20894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20895 { 20896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20897 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ), 20898 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 20899 reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 20900 } 20901 20902 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20903 template <typename Dispatch> 20904 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const20905 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 20906 { 20907 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20908 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20909 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT && 20910 "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 20911 # endif 20912 20913 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 20914 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( 20915 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 20916 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); 20917 20918 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 
std::move( modes ) ); 20919 } 20920 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20921 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 20922 20923 //=== VK_EXT_headless_surface === 20924 20925 template <typename Dispatch> createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const20926 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, 20927 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20928 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 20929 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20930 { 20931 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20932 return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( m_instance ), 20933 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), 20934 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 20935 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 20936 } 20937 20938 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20939 template <typename Dispatch> 20940 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20941 Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 20942 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20943 Dispatch const & d ) const 20944 { 20945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20946 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20947 VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function 
<vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); 20948 # endif 20949 20950 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 20951 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 20952 m_instance, 20953 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 20954 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20955 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 20956 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); 20957 20958 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 20959 } 20960 20961 # ifndef VULKAN_HPP_NO_SMART_HANDLE 20962 template <typename Dispatch> 20963 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20964 Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 20965 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20966 Dispatch const & d ) const 20967 { 20968 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20969 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20970 VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); 20971 # endif 20972 20973 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 20974 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 20975 m_instance, 20976 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 20977 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20978 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 20979 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); 20980 20981 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 20982 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 20983 } 20984 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 20985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20986 20987 //=== VK_KHR_buffer_device_address === 20988 20989 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const20990 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 20991 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20992 { 20993 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20994 return static_cast<DeviceAddress>( 20995 d.vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 20996 } 20997 20998 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20999 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const21000 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 21001 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21002 { 21003 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21004 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21005 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR && 21006 "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 
21007 # endif 21008 21009 VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 21010 21011 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 21012 } 21013 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21014 21015 template <typename Dispatch> getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const21016 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 21017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21018 { 21019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21020 return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 21021 } 21022 21023 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21024 template <typename Dispatch> getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const21025 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 21026 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21027 { 21028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21029 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21030 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR && 21031 "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 21032 # endif 21033 21034 uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 21035 21036 return result; 21037 } 21038 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21039 21040 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & 
d) const21041 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 21042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21043 { 21044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21045 return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), 21046 reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 21047 } 21048 21049 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21050 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const21051 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 21052 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21053 { 21054 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21055 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21056 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR && 21057 "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 21058 # endif 21059 21060 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 21061 21062 return result; 21063 } 21064 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21065 21066 //=== VK_EXT_line_rasterization === 21067 21068 template <typename Dispatch> 21069 VULKAN_HPP_INLINE void setLineStippleEXT(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const21070 CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21071 { 21072 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21073 d.vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( 
m_commandBuffer ), lineStippleFactor, lineStipplePattern ); 21074 } 21075 21076 //=== VK_EXT_host_query_reset === 21077 21078 template <typename Dispatch> resetQueryPoolEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const21079 VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 21080 uint32_t firstQuery, 21081 uint32_t queryCount, 21082 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21083 { 21084 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21085 d.vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 21086 } 21087 21088 //=== VK_EXT_extended_dynamic_state === 21089 21090 template <typename Dispatch> setCullModeEXT(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const21091 VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21092 { 21093 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21094 d.vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); 21095 } 21096 21097 template <typename Dispatch> setFrontFaceEXT(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const21098 VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21099 { 21100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21101 d.vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); 21102 } 21103 21104 template <typename Dispatch> setPrimitiveTopologyEXT(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const21105 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology 
primitiveTopology, 21106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21107 { 21108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21109 d.vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 21110 } 21111 21112 template <typename Dispatch> setViewportWithCountEXT(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const21113 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, 21114 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 21115 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21116 { 21117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21118 d.vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 21119 } 21120 21121 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21122 template <typename Dispatch> setViewportWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const21123 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 21124 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21125 { 21126 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21127 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21128 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT && 21129 "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 21130 # endif 21131 21132 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 21133 } 21134 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21135 21136 template <typename Dispatch> 21137 VULKAN_HPP_INLINE void setScissorWithCountEXT(uint32_t 
scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const21138 CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21139 { 21140 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21141 d.vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 21142 } 21143 21144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21145 template <typename Dispatch> setScissorWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const21146 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 21147 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21148 { 21149 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21150 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21151 VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT && 21152 "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 21153 # endif 21154 21155 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 21156 } 21157 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21158 21159 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const21160 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 21161 uint32_t bindingCount, 21162 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 21163 const VULKAN_HPP_NAMESPACE::DeviceSize * 
pOffsets, 21164 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 21165 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 21166 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21167 { 21168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21169 d.vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 21170 firstBinding, 21171 bindingCount, 21172 reinterpret_cast<const VkBuffer *>( pBuffers ), 21173 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 21174 reinterpret_cast<const VkDeviceSize *>( pSizes ), 21175 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 21176 } 21177 21178 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21179 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const21180 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 21181 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 21182 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 21183 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 21184 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 21185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 21186 { 21187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21189 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT && 21190 "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 21191 # 
endif 21192 # ifdef VULKAN_HPP_NO_EXCEPTIONS 21193 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 21194 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 21195 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 21196 # else 21197 if ( buffers.size() != offsets.size() ) 21198 { 21199 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); 21200 } 21201 if ( !sizes.empty() && buffers.size() != sizes.size() ) 21202 { 21203 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); 21204 } 21205 if ( !strides.empty() && buffers.size() != strides.size() ) 21206 { 21207 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); 21208 } 21209 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 21210 21211 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, 21212 firstBinding, 21213 buffers.size(), 21214 reinterpret_cast<const VkBuffer *>( buffers.data() ), 21215 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 21216 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 21217 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 21218 } 21219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21220 21221 template <typename Dispatch> setDepthTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const21222 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21223 { 21224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21225 d.vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); 21226 } 21227 21228 template <typename Dispatch> setDepthWriteEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const21229 
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21230 { 21231 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21232 d.vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); 21233 } 21234 21235 template <typename Dispatch> setDepthCompareOpEXT(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const21236 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21237 { 21238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21239 d.vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); 21240 } 21241 21242 template <typename Dispatch> setDepthBoundsTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const21243 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 21244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21245 { 21246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21247 d.vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); 21248 } 21249 21250 template <typename Dispatch> setStencilTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const21251 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21252 { 21253 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21254 d.vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); 21255 } 21256 21257 template <typename 
Dispatch> setStencilOpEXT(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const21258 VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 21259 VULKAN_HPP_NAMESPACE::StencilOp failOp, 21260 VULKAN_HPP_NAMESPACE::StencilOp passOp, 21261 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 21262 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 21263 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21264 { 21265 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21266 d.vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 21267 static_cast<VkStencilFaceFlags>( faceMask ), 21268 static_cast<VkStencilOp>( failOp ), 21269 static_cast<VkStencilOp>( passOp ), 21270 static_cast<VkStencilOp>( depthFailOp ), 21271 static_cast<VkCompareOp>( compareOp ) ); 21272 } 21273 21274 //=== VK_KHR_deferred_host_operations === 21275 21276 template <typename Dispatch> createDeferredOperationKHR(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,Dispatch const & d) const21277 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21278 VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, 21279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21280 { 21281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21282 return static_cast<Result>( d.vkCreateDeferredOperationKHR( static_cast<VkDevice>( m_device ), 21283 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21284 reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); 21285 } 21286 21287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21288 template <typename Dispatch> 21289 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21290 Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 21291 { 21292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21293 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21294 VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); 21295 # endif 21296 21297 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; 21298 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR( 21299 m_device, 21300 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21301 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); 21302 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); 21303 21304 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); 21305 } 21306 21307 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21308 template <typename Dispatch> 21309 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21310 Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 21311 { 21312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21313 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21314 VULKAN_HPP_ASSERT( 
d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); 21315 # endif 21316 21317 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; 21318 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR( 21319 m_device, 21320 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21321 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); 21322 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); 21323 21324 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21325 result, 21326 UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 21327 } 21328 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21329 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21330 21331 template <typename Dispatch> destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21332 VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21333 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21334 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21335 { 21336 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21337 d.vkDestroyDeferredOperationKHR( 21338 static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21339 } 21340 21341 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21342 template <typename Dispatch> destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21343 VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21344 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21345 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21346 { 21347 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21348 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21349 VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); 21350 # endif 21351 21352 d.vkDestroyDeferredOperationKHR( 21353 m_device, 21354 static_cast<VkDeferredOperationKHR>( operation ), 21355 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21356 } 21357 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21358 21359 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21360 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21361 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21362 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21363 { 21364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21365 d.vkDestroyDeferredOperationKHR( 21366 static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21367 } 21368 21369 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21370 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21371 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21372 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21374 { 21375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21376 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21377 VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); 21378 # endif 21379 21380 d.vkDestroyDeferredOperationKHR( 21381 m_device, 21382 static_cast<VkDeferredOperationKHR>( operation ), 21383 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21384 } 21385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21386 21387 template <typename Dispatch> getDeferredOperationMaxConcurrencyKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const21388 VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21389 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21390 { 21391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21392 return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ); 21393 } 21394 21395 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 21396 template <typename Dispatch> getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const21397 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21398 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21399 { 21400 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21401 return static_cast<Result>( d.vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); 21402 } 21403 #else 21404 template <typename Dispatch> 21405 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const21406 Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21407 { 21408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21409 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21410 VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" ); 21411 # endif 21412 21413 VULKAN_HPP_NAMESPACE::Result result = 21414 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 21415 21416 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21417 } 21418 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 21419 21420 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 21421 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const21422 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21423 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21424 { 21425 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21426 return static_cast<Result>( d.vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); 21427 } 21428 #else 21429 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const21430 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 21431 Dispatch const & d ) const 21432 { 21433 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 21434 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21435 VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" ); 21436 # endif 21437 21438 VULKAN_HPP_NAMESPACE::Result result = 21439 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 21440 VULKAN_HPP_NAMESPACE::detail::resultCheck( 21441 result, 21442 VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", 21443 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); 21444 21445 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21446 } 21447 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 21448 21449 //=== VK_KHR_pipeline_executable_properties === 21450 21451 template <typename Dispatch> getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,Dispatch const & d) const21452 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, 21453 uint32_t * pExecutableCount, 21454 VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, 21455 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21456 { 21457 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21458 return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ), 21459 reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), 21460 pExecutableCount, 21461 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) ); 21462 } 21463 21464 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21465 template <typename PipelineExecutablePropertiesKHRAllocator, 21466 typename Dispatch, 21467 typename std::enable_if< 
21468 std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, 21469 int>::type> 21470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21471 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,Dispatch const & d) const21472 Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const 21473 { 21474 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21475 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21476 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && 21477 "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21478 # endif 21479 21480 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties; 21481 uint32_t executableCount; 21482 VULKAN_HPP_NAMESPACE::Result result; 21483 do 21484 { 21485 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21486 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 21487 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 21488 { 21489 properties.resize( executableCount ); 21490 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21491 d.vkGetPipelineExecutablePropertiesKHR( m_device, 21492 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 21493 &executableCount, 21494 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 21495 } 21496 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21497 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getPipelineExecutablePropertiesKHR" ); 21498 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 21499 if ( executableCount < properties.size() ) 21500 { 21501 properties.resize( executableCount ); 21502 } 21503 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 21504 } 21505 21506 template <typename PipelineExecutablePropertiesKHRAllocator, 21507 typename Dispatch, 21508 typename std::enable_if< 21509 std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, 21510 int>::type> 21511 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21512 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,Dispatch const & d) const21513 Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, 21514 PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, 21515 Dispatch const & d ) const 21516 { 21517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21518 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21519 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && 21520 "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21521 # endif 21522 21523 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( 21524 pipelineExecutablePropertiesKHRAllocator ); 21525 uint32_t executableCount; 21526 VULKAN_HPP_NAMESPACE::Result result; 21527 do 21528 { 21529 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21530 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const 
VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 21531 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 21532 { 21533 properties.resize( executableCount ); 21534 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21535 d.vkGetPipelineExecutablePropertiesKHR( m_device, 21536 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 21537 &executableCount, 21538 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 21539 } 21540 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21541 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); 21542 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 21543 if ( executableCount < properties.size() ) 21544 { 21545 properties.resize( executableCount ); 21546 } 21547 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 21548 } 21549 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21550 21551 template <typename Dispatch> 21552 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pStatisticCount,VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,Dispatch const & d) const21553 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 21554 uint32_t * pStatisticCount, 21555 VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, 21556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21557 { 21558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21559 return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ), 21560 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 21561 pStatisticCount, 21562 reinterpret_cast<VkPipelineExecutableStatisticKHR 
*>( pStatistics ) ) ); 21563 } 21564 21565 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21566 template <typename PipelineExecutableStatisticKHRAllocator, 21567 typename Dispatch, 21568 typename std::enable_if< 21569 std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, 21570 int>::type> 21571 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21572 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const21573 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 21574 { 21575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21576 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21577 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && 21578 "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21579 # endif 21580 21581 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics; 21582 uint32_t statisticCount; 21583 VULKAN_HPP_NAMESPACE::Result result; 21584 do 21585 { 21586 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR( 21587 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 21588 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 21589 { 21590 statistics.resize( statisticCount ); 21591 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21592 d.vkGetPipelineExecutableStatisticsKHR( m_device, 21593 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 21594 &statisticCount, 21595 
reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 21596 } 21597 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21598 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 21599 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 21600 if ( statisticCount < statistics.size() ) 21601 { 21602 statistics.resize( statisticCount ); 21603 } 21604 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); 21605 } 21606 21607 template <typename PipelineExecutableStatisticKHRAllocator, 21608 typename Dispatch, 21609 typename std::enable_if< 21610 std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, 21611 int>::type> 21612 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21613 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,Dispatch const & d) const21614 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 21615 PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, 21616 Dispatch const & d ) const 21617 { 21618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21619 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21620 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && 21621 "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21622 # endif 21623 21624 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( 21625 
pipelineExecutableStatisticKHRAllocator ); 21626 uint32_t statisticCount; 21627 VULKAN_HPP_NAMESPACE::Result result; 21628 do 21629 { 21630 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR( 21631 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 21632 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 21633 { 21634 statistics.resize( statisticCount ); 21635 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21636 d.vkGetPipelineExecutableStatisticsKHR( m_device, 21637 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 21638 &statisticCount, 21639 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 21640 } 21641 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21642 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 21643 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 21644 if ( statisticCount < statistics.size() ) 21645 { 21646 statistics.resize( statisticCount ); 21647 } 21648 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); 21649 } 21650 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21651 21652 template <typename Dispatch> 21653 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pInternalRepresentationCount,VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,Dispatch const & d) const21654 Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 21655 uint32_t * pInternalRepresentationCount, 21656 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, 21657 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21658 { 21659 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21660 return static_cast<Result>( 21661 d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ), 21662 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 21663 pInternalRepresentationCount, 21664 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) ); 21665 } 21666 21667 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21668 template <typename PipelineExecutableInternalRepresentationKHRAllocator, 21669 typename Dispatch, 21670 typename std::enable_if<std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type, 21671 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, 21672 int>::type> 21673 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 21674 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const21675 Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 21676 { 21677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21678 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21679 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && 21680 "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21681 # endif 21682 21683 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 21684 internalRepresentations; 21685 uint32_t internalRepresentationCount; 21686 VULKAN_HPP_NAMESPACE::Result result; 21687 
do 21688 { 21689 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 21690 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 21691 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 21692 { 21693 internalRepresentations.resize( internalRepresentationCount ); 21694 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 21695 m_device, 21696 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 21697 &internalRepresentationCount, 21698 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); 21699 } 21700 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21701 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 21702 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 21703 if ( internalRepresentationCount < internalRepresentations.size() ) 21704 { 21705 internalRepresentations.resize( internalRepresentationCount ); 21706 } 21707 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); 21708 } 21709 21710 template <typename PipelineExecutableInternalRepresentationKHRAllocator, 21711 typename Dispatch, 21712 typename std::enable_if<std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type, 21713 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, 21714 int>::type> 21715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 21716 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const 
VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,Dispatch const & d) const21717 Device::getPipelineExecutableInternalRepresentationsKHR( 21718 const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 21719 PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, 21720 Dispatch const & d ) const 21721 { 21722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21723 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21724 VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && 21725 "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); 21726 # endif 21727 21728 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 21729 internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); 21730 uint32_t internalRepresentationCount; 21731 VULKAN_HPP_NAMESPACE::Result result; 21732 do 21733 { 21734 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 21735 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 21736 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 21737 { 21738 internalRepresentations.resize( internalRepresentationCount ); 21739 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 21740 m_device, 21741 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 21742 &internalRepresentationCount, 21743 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); 21744 } 21745 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21746 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 21747 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 21748 if ( internalRepresentationCount < internalRepresentations.size() ) 21749 { 21750 internalRepresentations.resize( internalRepresentationCount ); 21751 } 21752 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); 21753 } 21754 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21755 21756 //=== VK_EXT_host_image_copy === 21757 21758 template <typename Dispatch> copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo,Dispatch const & d) const21759 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, 21760 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21761 { 21762 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21763 return static_cast<Result>( 21764 d.vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) ); 21765 } 21766 21767 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21768 template <typename Dispatch> 21769 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo,Dispatch const & d) const21770 Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const 21771 { 21772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21773 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21774 VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires 
<VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21775 # endif 21776 21777 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21778 d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( ©MemoryToImageInfo ) ) ); 21779 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); 21780 21781 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21782 } 21783 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21784 21785 template <typename Dispatch> copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo,Dispatch const & d) const21786 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, 21787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21788 { 21789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21790 return static_cast<Result>( 21791 d.vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) ); 21792 } 21793 21794 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21795 template <typename Dispatch> 21796 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo,Dispatch const & d) const21797 Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const 21798 { 21799 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21800 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21801 VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21802 # endif 21803 21804 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 21805 d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( ©ImageToMemoryInfo ) ) ); 21806 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); 21807 21808 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21809 } 21810 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21811 21812 template <typename Dispatch> copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo,Dispatch const & d) const21813 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, 21814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21815 { 21816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21817 return static_cast<Result>( 21818 d.vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) ); 21819 } 21820 21821 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21822 template <typename Dispatch> 21823 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo,Dispatch const & d) const21824 Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const 21825 { 21826 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21827 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21828 VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21829 # endif 21830 21831 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21832 d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( ©ImageToImageInfo ) ) 
); 21833 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); 21834 21835 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21836 } 21837 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21838 21839 template <typename Dispatch> transitionImageLayoutEXT(uint32_t transitionCount,const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions,Dispatch const & d) const21840 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, 21841 const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, 21842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21843 { 21844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21845 return static_cast<Result>( d.vkTransitionImageLayoutEXT( 21846 static_cast<VkDevice>( m_device ), transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) ); 21847 } 21848 21849 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21850 template <typename Dispatch> 21851 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type transitionImageLayoutEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions,Dispatch const & d) const21852 Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions, 21853 Dispatch const & d ) const 21854 { 21855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21856 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21857 VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); 21858 # endif 21859 21860 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21861 d.vkTransitionImageLayoutEXT( m_device, transitions.size(), 
reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) ); 21862 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); 21863 21864 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21865 } 21866 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21867 21868 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const21869 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, 21870 const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, 21871 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 21872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21873 { 21874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21875 d.vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ), 21876 static_cast<VkImage>( image ), 21877 reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), 21878 reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 21879 } 21880 21881 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21882 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const21883 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2EXT( 21884 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21885 { 21886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21887 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21888 VULKAN_HPP_ASSERT( 21889 d.vkGetImageSubresourceLayout2EXT && 21890 "Function <vkGetImageSubresourceLayout2EXT> requires 
<VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 21891 # endif 21892 21893 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 21894 d.vkGetImageSubresourceLayout2EXT( m_device, 21895 static_cast<VkImage>( image ), 21896 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 21897 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 21898 21899 return layout; 21900 } 21901 21902 template <typename X, typename Y, typename... Z, typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const21903 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT( 21904 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21905 { 21906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21908 VULKAN_HPP_ASSERT( 21909 d.vkGetImageSubresourceLayout2EXT && 21910 "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 21911 # endif 21912 21913 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21914 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 21915 d.vkGetImageSubresourceLayout2EXT( m_device, 21916 static_cast<VkImage>( image ), 21917 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 21918 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 21919 21920 return structureChain; 21921 } 21922 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21923 21924 //=== VK_KHR_map_memory2 === 21925 21926 template <typename Dispatch> mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfo * 
pMemoryMapInfo,void ** ppData,Dispatch const & d) const21927 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, 21928 void ** ppData, 21929 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21930 { 21931 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21932 return static_cast<Result>( d.vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); 21933 } 21934 21935 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21936 template <typename Dispatch> 21937 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo,Dispatch const & d) const21938 Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d ) const 21939 { 21940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21942 VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 21943 # endif 21944 21945 void * pData; 21946 VULKAN_HPP_NAMESPACE::Result result = 21947 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); 21948 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); 21949 21950 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 21951 } 21952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21953 21954 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo,Dispatch const & d) const21955 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, 21956 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 21957 { 21958 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21959 return static_cast<Result>( d.vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); 21960 } 21961 21962 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21963 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo,Dispatch const & d) const21964 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, 21965 Dispatch const & d ) const 21966 { 21967 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21968 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21969 VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); 21970 # endif 21971 21972 VULKAN_HPP_NAMESPACE::Result result = 21973 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); 21974 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); 21975 21976 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21977 } 21978 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21979 21980 //=== VK_EXT_swapchain_maintenance1 === 21981 21982 template <typename Dispatch> releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,Dispatch const & d) const21983 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, 21984 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21985 { 21986 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21987 return static_cast<Result>( 21988 d.vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), 
reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); 21989 } 21990 21991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21992 template <typename Dispatch> 21993 VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo,Dispatch const & d) const21994 Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const 21995 { 21996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21997 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21998 VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" ); 21999 # endif 22000 22001 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22002 d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) ); 22003 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); 22004 22005 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22006 } 22007 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22008 22009 //=== VK_NV_device_generated_commands === 22010 22011 template <typename Dispatch> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const22012 VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, 22013 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 22014 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22015 { 22016 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22017 d.vkGetGeneratedCommandsMemoryRequirementsNV( 
static_cast<VkDevice>( m_device ), 22018 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), 22019 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 22020 } 22021 22022 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22023 template <typename Dispatch> 22024 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const22025 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 22026 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22027 { 22028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22029 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22030 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 22031 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 22032 # endif 22033 22034 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 22035 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 22036 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 22037 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 22038 22039 return memoryRequirements; 22040 } 22041 22042 template <typename X, typename Y, typename... 
Z, typename Dispatch> 22043 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const22044 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 22045 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22046 { 22047 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22048 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22049 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 22050 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 22051 # endif 22052 22053 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 22054 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 22055 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 22056 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 22057 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 22058 22059 return structureChain; 22060 } 22061 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22062 22063 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const22064 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 22065 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22066 { 22067 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22068 d.vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 22069 reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 22070 } 22071 22072 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 22073 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const22074 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 22075 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22076 { 22077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22078 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22079 VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 22080 # endif 22081 22082 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 22083 } 22084 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22085 22086 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const22087 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 22088 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 22089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22090 { 22091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22092 d.vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 22093 static_cast<VkBool32>( isPreprocessed ), 22094 reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 22095 } 22096 22097 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22098 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const22099 VULKAN_HPP_INLINE void 
CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 22100 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 22101 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22102 { 22103 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22104 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22105 VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 22106 # endif 22107 22108 d.vkCmdExecuteGeneratedCommandsNV( 22109 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 22110 } 22111 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22112 22113 template <typename Dispatch> bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t groupIndex,Dispatch const & d) const22114 VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22115 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 22116 uint32_t groupIndex, 22117 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22118 { 22119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22120 d.vkCmdBindPipelineShaderGroupNV( 22121 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); 22122 } 22123 22124 template <typename Dispatch> 22125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,Dispatch const & d) const22126 Device::createIndirectCommandsLayoutNV( const 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, 22127 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22128 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, 22129 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22130 { 22131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22132 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), 22133 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), 22134 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22135 reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); 22136 } 22137 22138 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22139 template <typename Dispatch> 22140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22141 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 22142 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22143 Dispatch const & d ) const 22144 { 22145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22146 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22147 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 22148 # endif 22149 22150 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 22151 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 22152 m_device, 22153 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 22154 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22155 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 22156 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); 22157 22158 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); 22159 } 22160 22161 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22162 template <typename Dispatch> 22163 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22164 Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 22165 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22166 Dispatch const & d ) const 22167 { 22168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22169 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22170 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 22171 # endif 22172 22173 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 22174 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 22175 m_device, 22176 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 22177 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22178 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 22179 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); 22180 22181 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 22182 UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( 22183 indirectCommandsLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 22184 } 22185 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22186 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22187 22188 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22189 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 22190 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22191 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22192 { 22193 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22194 d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), 22195 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 22196 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22197 } 22198 22199 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22200 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22201 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 22202 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22203 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22204 { 22205 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22206 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22207 
VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 22208 # endif 22209 22210 d.vkDestroyIndirectCommandsLayoutNV( 22211 m_device, 22212 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 22213 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22214 } 22215 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22216 22217 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22218 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 22219 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22220 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22221 { 22222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22223 d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), 22224 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 22225 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22226 } 22227 22228 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22229 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22230 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 22231 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22232 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22233 { 22234 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22236 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function 
<vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 22237 # endif 22238 22239 d.vkDestroyIndirectCommandsLayoutNV( 22240 m_device, 22241 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 22242 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22243 } 22244 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22245 22246 //=== VK_EXT_depth_bias_control === 22247 22248 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,Dispatch const & d) const22249 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, 22250 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22251 { 22252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22253 d.vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); 22254 } 22255 22256 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22257 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,Dispatch const & d) const22258 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, 22259 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22260 { 22261 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22262 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22263 VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" ); 22264 # endif 22265 22266 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) ); 22267 } 22268 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22269 22270 //=== VK_EXT_acquire_drm_display === 22271 22272 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 22273 template <typename Dispatch> 
acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const22274 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, 22275 VULKAN_HPP_NAMESPACE::DisplayKHR display, 22276 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22277 { 22278 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22279 return static_cast<Result>( d.vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) ); 22280 } 22281 #else 22282 template <typename Dispatch> 22283 VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const22284 PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 22285 { 22286 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22287 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22288 VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 22289 # endif 22290 22291 VULKAN_HPP_NAMESPACE::Result result = 22292 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 22293 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); 22294 22295 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22296 } 22297 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 22298 22299 template <typename Dispatch> getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,VULKAN_HPP_NAMESPACE::DisplayKHR * display,Dispatch const & d) const22300 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, 22301 uint32_t connectorId, 22302 VULKAN_HPP_NAMESPACE::DisplayKHR * display, 22303 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 22304 { 22305 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22306 return static_cast<Result>( 22307 d.vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); 22308 } 22309 22310 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22311 template <typename Dispatch> 22312 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const22313 PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 22314 { 22315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22316 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22317 VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 22318 # endif 22319 22320 VULKAN_HPP_NAMESPACE::DisplayKHR display; 22321 VULKAN_HPP_NAMESPACE::Result result = 22322 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 22323 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); 22324 22325 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 22326 } 22327 22328 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22329 template <typename Dispatch> 22330 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getDrmDisplayEXTUnique(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const22331 PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 22332 { 22333 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22334 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22335 
VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 22336 # endif 22337 22338 VULKAN_HPP_NAMESPACE::DisplayKHR display; 22339 VULKAN_HPP_NAMESPACE::Result result = 22340 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 22341 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); 22342 22343 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22344 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 22345 } 22346 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22347 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22348 22349 //=== VK_EXT_private_data === 22350 22351 template <typename Dispatch> createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const22352 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 22353 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22354 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 22355 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22356 { 22357 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22358 return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( static_cast<VkDevice>( m_device ), 22359 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 22360 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22361 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 22362 } 22363 22364 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22365 template <typename Dispatch> 22366 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22367 Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 22368 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22369 Dispatch const & d ) const 22370 { 22371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22372 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22373 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 22374 # endif 22375 22376 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 22377 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 22378 m_device, 22379 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 22380 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22381 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 22382 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); 22383 22384 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); 22385 } 22386 22387 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22388 template <typename Dispatch> 22389 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotEXTUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22390 Device::createPrivateDataSlotEXTUnique( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 22391 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22392 Dispatch const & d ) const 22393 { 22394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22395 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22396 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 22397 # endif 22398 22399 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 22400 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 22401 m_device, 22402 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 22403 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22404 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 22405 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); 22406 22407 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22408 result, 22409 UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 22410 } 22411 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22412 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22413 22414 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22415 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22416 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22417 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22418 { 22419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22420 d.vkDestroyPrivateDataSlotEXT( 22421 
static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22422 } 22423 22424 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22425 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22426 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22427 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22428 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22429 { 22430 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22431 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22432 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 22433 # endif 22434 22435 d.vkDestroyPrivateDataSlotEXT( 22436 m_device, 22437 static_cast<VkPrivateDataSlot>( privateDataSlot ), 22438 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22439 } 22440 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22441 22442 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 22443 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const22444 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 22445 uint64_t objectHandle, 22446 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22447 uint64_t data, 22448 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22449 { 22450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22451 return static_cast<Result>( d.vkSetPrivateDataEXT( 22452 
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 22453 } 22454 #else 22455 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const22456 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 22457 uint64_t objectHandle, 22458 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22459 uint64_t data, 22460 Dispatch const & d ) const 22461 { 22462 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22463 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22464 VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 22465 # endif 22466 22467 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22468 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 22469 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); 22470 22471 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22472 } 22473 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 22474 22475 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const22476 VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 22477 uint64_t objectHandle, 22478 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22479 uint64_t * pData, 22480 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22481 { 22482 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22483 d.vkGetPrivateDataEXT( 22484 static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 22485 } 22486 22487 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22488 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const22489 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 22490 uint64_t objectHandle, 22491 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 22492 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22493 { 22494 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22495 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22496 VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 22497 # endif 22498 22499 uint64_t data; 22500 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 22501 22502 return data; 22503 } 22504 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22505 22506 //=== VK_KHR_video_encode_queue === 22507 22508 template <typename Dispatch> 22509 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,Dispatch const & d) const22510 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, 22511 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, 22512 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 22513 { 22514 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22515 return static_cast<Result>( 22516 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), 22517 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ), 22518 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) ); 22519 } 22520 22521 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22522 template <typename Dispatch> 22523 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const22524 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 22525 Dispatch const & d ) const 22526 { 22527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22528 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22529 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && 22530 "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); 22531 # endif 22532 22533 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; 22534 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22535 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 22536 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 22537 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 22538 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 22539 22540 
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); 22541 } 22542 22543 template <typename X, typename Y, typename... Z, typename Dispatch> 22544 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const22545 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 22546 Dispatch const & d ) const 22547 { 22548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22550 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && 22551 "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); 22552 # endif 22553 22554 StructureChain<X, Y, Z...> structureChain; 22555 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = 22556 structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>(); 22557 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22558 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 22559 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 22560 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 22561 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 22562 22563 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 22564 } 22565 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22566 22567 template <typename Dispatch> 22568 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,size_t * pDataSize,void * pData,Dispatch const & d) const22569 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, 22570 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, 22571 size_t * pDataSize, 22572 void * pData, 22573 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22574 { 22575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22576 return static_cast<Result>( 22577 d.vkGetEncodedVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), 22578 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ), 22579 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ), 22580 pDataSize, 22581 pData ) ); 22582 } 22583 22584 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22585 template <typename Uint8_tAllocator, 22586 typename Dispatch, 22587 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22588 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22589 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Dispatch const & d) const22590 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 22591 Dispatch const & d ) const 22592 { 22593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22594 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22595 
VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 22596 # endif 22597 22598 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_; 22599 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; 22600 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 22601 size_t dataSize; 22602 VULKAN_HPP_NAMESPACE::Result result; 22603 do 22604 { 22605 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22606 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22607 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22608 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22609 &dataSize, 22610 nullptr ) ); 22611 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 22612 { 22613 data.resize( dataSize ); 22614 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22615 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22616 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22617 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22618 &dataSize, 22619 reinterpret_cast<void *>( data.data() ) ) ); 22620 } 22621 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22622 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 22623 22624 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 22625 } 22626 22627 template <typename Uint8_tAllocator, 22628 typename Dispatch, 22629 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22630 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22631 typename 
ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const22632 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 22633 Uint8_tAllocator & uint8_tAllocator, 22634 Dispatch const & d ) const 22635 { 22636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22637 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22638 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 22639 # endif 22640 22641 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( 22642 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 22643 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; 22644 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 22645 size_t dataSize; 22646 VULKAN_HPP_NAMESPACE::Result result; 22647 do 22648 { 22649 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22650 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22651 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22652 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22653 &dataSize, 22654 nullptr ) ); 22655 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 22656 { 22657 data.resize( dataSize ); 22658 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22659 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22660 reinterpret_cast<const 
VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22661 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22662 &dataSize, 22663 reinterpret_cast<void *>( data.data() ) ) ); 22664 } 22665 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22666 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 22667 22668 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 22669 } 22670 22671 template <typename X, 22672 typename Y, 22673 typename... Z, 22674 typename Uint8_tAllocator, 22675 typename Dispatch, 22676 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22677 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22678 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Dispatch const & d) const22679 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 22680 Dispatch const & d ) const 22681 { 22682 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22683 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22684 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 22685 # endif 22686 22687 std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_; 22688 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = 22689 data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>(); 22690 std::vector<uint8_t, Uint8_tAllocator> & data = 
data_.second; 22691 size_t dataSize; 22692 VULKAN_HPP_NAMESPACE::Result result; 22693 do 22694 { 22695 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22696 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22697 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22698 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22699 &dataSize, 22700 nullptr ) ); 22701 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 22702 { 22703 data.resize( dataSize ); 22704 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22705 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22706 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22707 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22708 &dataSize, 22709 reinterpret_cast<void *>( data.data() ) ) ); 22710 } 22711 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22712 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 22713 22714 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 22715 } 22716 22717 template <typename X, 22718 typename Y, 22719 typename... 
Z, 22720 typename Uint8_tAllocator, 22721 typename Dispatch, 22722 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22723 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22724 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const22725 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 22726 Uint8_tAllocator & uint8_tAllocator, 22727 Dispatch const & d ) const 22728 { 22729 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22730 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22731 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 22732 # endif 22733 22734 std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_( 22735 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 22736 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = 22737 data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>(); 22738 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 22739 size_t dataSize; 22740 VULKAN_HPP_NAMESPACE::Result result; 22741 do 22742 { 22743 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22744 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22745 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22746 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22747 &dataSize, 22748 nullptr ) ); 22749 if ( 
( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 22750 { 22751 data.resize( dataSize ); 22752 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22753 d.vkGetEncodedVideoSessionParametersKHR( m_device, 22754 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 22755 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 22756 &dataSize, 22757 reinterpret_cast<void *>( data.data() ) ) ); 22758 } 22759 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22760 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 22761 22762 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 22763 } 22764 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22765 22766 template <typename Dispatch> encodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,Dispatch const & d) const22767 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, 22768 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22769 { 22770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22771 d.vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); 22772 } 22773 22774 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22775 template <typename Dispatch> encodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,Dispatch const & d) const22776 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, 22777 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22778 { 22779 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22780 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22781 VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires 
<VK_KHR_video_encode_queue>" ); 22782 # endif 22783 22784 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) ); 22785 } 22786 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22787 22788 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 22789 //=== VK_NV_cuda_kernel_launch === 22790 22791 template <typename Dispatch> createCudaModuleNV(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,Dispatch const & d) const22792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, 22793 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22794 VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, 22795 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22796 { 22797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22798 return static_cast<Result>( d.vkCreateCudaModuleNV( static_cast<VkDevice>( m_device ), 22799 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ), 22800 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22801 reinterpret_cast<VkCudaModuleNV *>( pModule ) ) ); 22802 } 22803 22804 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22805 template <typename Dispatch> 22806 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type createCudaModuleNV(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22807 Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, 22808 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22809 Dispatch const & d ) const 22810 { 22811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22812 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22813 
VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 22814 # endif 22815 22816 VULKAN_HPP_NAMESPACE::CudaModuleNV module; 22817 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22818 d.vkCreateCudaModuleNV( m_device, 22819 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), 22820 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22821 reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); 22822 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); 22823 22824 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); 22825 } 22826 22827 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22828 template <typename Dispatch> 22829 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type createCudaModuleNVUnique(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22830 Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, 22831 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22832 Dispatch const & d ) const 22833 { 22834 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22835 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22836 VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 22837 # endif 22838 22839 VULKAN_HPP_NAMESPACE::CudaModuleNV module; 22840 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22841 d.vkCreateCudaModuleNV( m_device, 22842 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), 22843 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22844 reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); 22845 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); 22846 22847 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22848 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 22849 } 22850 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22851 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22852 22853 template <typename Dispatch> getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,size_t * pCacheSize,void * pCacheData,Dispatch const & d) const22854 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 22855 size_t * pCacheSize, 22856 void * pCacheData, 22857 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22858 { 22859 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22860 return static_cast<Result>( d.vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) ); 22861 } 22862 22863 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22864 template <typename Uint8_tAllocator, 22865 typename Dispatch, 22866 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22867 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Dispatch const & d) const22868 Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const 22869 { 22870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22871 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22872 VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && 
"Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" ); 22873 # endif 22874 22875 std::vector<uint8_t, Uint8_tAllocator> cacheData; 22876 size_t cacheSize; 22877 VULKAN_HPP_NAMESPACE::Result result; 22878 do 22879 { 22880 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); 22881 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) 22882 { 22883 cacheData.resize( cacheSize ); 22884 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22885 d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); 22886 } 22887 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22888 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); 22889 VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); 22890 if ( cacheSize < cacheData.size() ) 22891 { 22892 cacheData.resize( cacheSize ); 22893 } 22894 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); 22895 } 22896 22897 template <typename Uint8_tAllocator, 22898 typename Dispatch, 22899 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 22900 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const22901 Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 22902 { 22903 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22904 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22905 VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires 
<VK_NV_cuda_kernel_launch>" ); 22906 # endif 22907 22908 std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator ); 22909 size_t cacheSize; 22910 VULKAN_HPP_NAMESPACE::Result result; 22911 do 22912 { 22913 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); 22914 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) 22915 { 22916 cacheData.resize( cacheSize ); 22917 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22918 d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); 22919 } 22920 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22921 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); 22922 VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); 22923 if ( cacheSize < cacheData.size() ) 22924 { 22925 cacheData.resize( cacheSize ); 22926 } 22927 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); 22928 } 22929 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22930 22931 template <typename Dispatch> createCudaFunctionNV(const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,Dispatch const & d) const22932 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, 22933 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22934 VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, 22935 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22936 { 22937 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22938 return static_cast<Result>( d.vkCreateCudaFunctionNV( static_cast<VkDevice>( m_device ), 22939 
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ), 22940 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22941 reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) ); 22942 } 22943 22944 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22945 template <typename Dispatch> 22946 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type createCudaFunctionNV(const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22947 Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, 22948 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22949 Dispatch const & d ) const 22950 { 22951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22952 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22953 VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 22954 # endif 22955 22956 VULKAN_HPP_NAMESPACE::CudaFunctionNV function; 22957 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22958 d.vkCreateCudaFunctionNV( m_device, 22959 reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ), 22960 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22961 reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); 22962 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); 22963 22964 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); 22965 } 22966 22967 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22968 template <typename Dispatch> 22969 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type 
createCudaFunctionNVUnique(const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22970 Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, 22971 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22972 Dispatch const & d ) const 22973 { 22974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22975 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22976 VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 22977 # endif 22978 22979 VULKAN_HPP_NAMESPACE::CudaFunctionNV function; 22980 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22981 d.vkCreateCudaFunctionNV( m_device, 22982 reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ), 22983 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22984 reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); 22985 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); 22986 22987 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22988 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 22989 } 22990 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22991 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22992 22993 template <typename Dispatch> destroyCudaModuleNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22994 VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 22995 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22996 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 22997 { 22998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22999 d.vkDestroyCudaModuleNV( 23000 static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23001 } 23002 23003 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23004 template <typename Dispatch> destroyCudaModuleNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23005 VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 23006 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23007 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23008 { 23009 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23010 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23011 VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 23012 # endif 23013 23014 d.vkDestroyCudaModuleNV( m_device, 23015 static_cast<VkCudaModuleNV>( module ), 23016 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23017 } 23018 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23019 23020 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23021 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 23022 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23023 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23024 { 23025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23026 d.vkDestroyCudaModuleNV( 23027 static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23028 } 23029 23030 # ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 23031 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23032 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 23033 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23035 { 23036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23037 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23038 VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 23039 # endif 23040 23041 d.vkDestroyCudaModuleNV( m_device, 23042 static_cast<VkCudaModuleNV>( module ), 23043 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23044 } 23045 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23046 23047 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23048 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 23049 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23051 { 23052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23053 d.vkDestroyCudaFunctionNV( 23054 static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23055 } 23056 23057 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23058 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23059 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( 
VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 23060 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23061 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23062 { 23063 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23064 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23065 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 23066 # endif 23067 23068 d.vkDestroyCudaFunctionNV( m_device, 23069 static_cast<VkCudaFunctionNV>( function ), 23070 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23071 } 23072 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23073 23074 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23075 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 23076 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23077 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23078 { 23079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23080 d.vkDestroyCudaFunctionNV( 23081 static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23082 } 23083 23084 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23085 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23086 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 23087 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23089 { 23090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23091 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) 23092 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 23093 # endif 23094 23095 d.vkDestroyCudaFunctionNV( m_device, 23096 static_cast<VkCudaFunctionNV>( function ), 23097 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23098 } 23099 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23100 23101 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,Dispatch const & d) const23102 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, 23103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23104 { 23105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23106 d.vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); 23107 } 23108 23109 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23110 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,Dispatch const & d) const23111 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, 23112 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23113 { 23114 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23115 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23116 VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" ); 23117 # endif 23118 23119 d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) ); 23120 } 23121 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23122 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 23123 23124 #if defined( VK_USE_PLATFORM_METAL_EXT ) 23125 //=== VK_EXT_metal_objects === 23126 23127 template <typename Dispatch> 
exportMetalObjectsEXT(VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,Dispatch const & d) const23128 VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, 23129 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23130 { 23131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23132 d.vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); 23133 } 23134 23135 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23136 template <typename Dispatch> 23137 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT(Dispatch const & d) const23138 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23139 { 23140 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23141 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23142 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 23143 # endif 23144 23145 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; 23146 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 23147 23148 return metalObjectsInfo; 23149 } 23150 23151 template <typename X, typename Y, typename... 
Z, typename Dispatch> 23152 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> exportMetalObjectsEXT(Dispatch const & d) const23153 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23154 { 23155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23156 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23157 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 23158 # endif 23159 23160 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 23161 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>(); 23162 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 23163 23164 return structureChain; 23165 } 23166 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23167 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 23168 23169 //=== VK_KHR_synchronization2 === 23170 23171 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const23172 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 23173 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 23174 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23175 { 23176 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23177 d.vkCmdSetEvent2KHR( 23178 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 23179 } 23180 23181 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23182 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const23183 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( 
VULKAN_HPP_NAMESPACE::Event event, 23184 const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 23185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23186 { 23187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23189 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 23190 # endif 23191 23192 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 23193 } 23194 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23195 23196 template <typename Dispatch> resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const23197 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 23198 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 23199 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23200 { 23201 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23202 d.vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 23203 } 23204 23205 template <typename Dispatch> waitEvents2KHR(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const23206 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, 23207 const VULKAN_HPP_NAMESPACE::Event * pEvents, 23208 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 23209 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23210 { 23211 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23212 d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 23213 eventCount, 23214 reinterpret_cast<const VkEvent *>( pEvents ), 23215 reinterpret_cast<const 
VkDependencyInfo *>( pDependencyInfos ) ); 23216 } 23217 23218 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23219 template <typename Dispatch> waitEvents2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const23220 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 23221 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 23222 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 23223 { 23224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23225 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23226 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 23227 # endif 23228 # ifdef VULKAN_HPP_NO_EXCEPTIONS 23229 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 23230 # else 23231 if ( events.size() != dependencyInfos.size() ) 23232 { 23233 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); 23234 } 23235 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 23236 23237 d.vkCmdWaitEvents2KHR( m_commandBuffer, 23238 events.size(), 23239 reinterpret_cast<const VkEvent *>( events.data() ), 23240 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 23241 } 23242 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23243 23244 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const23245 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 23246 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23247 { 23248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 23249 d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 23250 } 23251 23252 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23253 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const23254 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 23255 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23256 { 23257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23259 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 23260 # endif 23261 23262 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 23263 } 23264 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23265 23266 template <typename Dispatch> writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const23267 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 23268 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 23269 uint32_t query, 23270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23271 { 23272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23273 d.vkCmdWriteTimestamp2KHR( 23274 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 23275 } 23276 23277 template <typename Dispatch> submit2KHR(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const23278 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t 
submitCount, 23279 const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 23280 VULKAN_HPP_NAMESPACE::Fence fence, 23281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23282 { 23283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23284 return static_cast<Result>( 23285 d.vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 23286 } 23287 23288 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23289 template <typename Dispatch> submit2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const23290 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( 23291 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 23292 { 23293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23294 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23295 VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 23296 # endif 23297 23298 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23299 d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 23300 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); 23301 23302 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 23303 } 23304 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23305 23306 //=== VK_EXT_descriptor_buffer === 23307 23308 template <typename Dispatch> getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,Dispatch const & d) 
const23309 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 23310 VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, 23311 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23312 { 23313 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23314 d.vkGetDescriptorSetLayoutSizeEXT( 23315 static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); 23316 } 23317 23318 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23319 template <typename Dispatch> 23320 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,Dispatch const & d) const23321 Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23322 { 23323 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23324 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23325 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" ); 23326 # endif 23327 23328 VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; 23329 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) ); 23330 23331 return layoutSizeInBytes; 23332 } 23333 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23334 23335 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,Dispatch const & d) const23336 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 23337 uint32_t binding, 23338 VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, 23339 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 23340 { 23341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23342 d.vkGetDescriptorSetLayoutBindingOffsetEXT( 23343 static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); 23344 } 23345 23346 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23347 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,Dispatch const & d) const23348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( 23349 VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23350 { 23351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23352 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23353 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && 23354 "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" ); 23355 # endif 23356 23357 VULKAN_HPP_NAMESPACE::DeviceSize offset; 23358 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) ); 23359 23360 return offset; 23361 } 23362 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23363 23364 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const23365 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, 23366 size_t dataSize, 23367 void * pDescriptor, 23368 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23369 { 23370 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23371 d.vkGetDescriptorEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( 
pDescriptorInfo ), dataSize, pDescriptor ); 23372 } 23373 23374 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23375 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const23376 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 23377 size_t dataSize, 23378 void * pDescriptor, 23379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23380 { 23381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23382 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23383 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 23384 # endif 23385 23386 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor ); 23387 } 23388 23389 template <typename DescriptorType, typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,Dispatch const & d) const23390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 23391 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23392 { 23393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23394 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23395 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 23396 # endif 23397 23398 DescriptorType descriptor; 23399 d.vkGetDescriptorEXT( 23400 m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) ); 23401 23402 return descriptor; 23403 } 23404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23405 23406 template <typename Dispatch> bindDescriptorBuffersEXT(uint32_t bufferCount,const 
VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos,Dispatch const & d) const23407 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, 23408 const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, 23409 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23410 { 23411 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23412 d.vkCmdBindDescriptorBuffersEXT( 23413 static_cast<VkCommandBuffer>( m_commandBuffer ), bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); 23414 } 23415 23416 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23417 template <typename Dispatch> 23418 VULKAN_HPP_INLINE void bindDescriptorBuffersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,Dispatch const & d) const23419 CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, 23420 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23421 { 23422 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23423 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23424 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" ); 23425 # endif 23426 23427 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) ); 23428 } 23429 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23430 23431 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t setCount,const uint32_t * pBufferIndices,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const23432 VULKAN_HPP_INLINE void 
CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 23433 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 23434 uint32_t firstSet, 23435 uint32_t setCount, 23436 const uint32_t * pBufferIndices, 23437 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 23438 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23439 { 23440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23441 d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 23442 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 23443 static_cast<VkPipelineLayout>( layout ), 23444 firstSet, 23445 setCount, 23446 pBufferIndices, 23447 reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 23448 } 23449 23450 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23451 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const23452 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 23453 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 23454 uint32_t firstSet, 23455 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, 23456 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 23457 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 23458 { 23459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23460 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23461 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" ); 23462 # endif 23463 # ifdef VULKAN_HPP_NO_EXCEPTIONS 23464 
VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); 23465 # else 23466 if ( bufferIndices.size() != offsets.size() ) 23467 { 23468 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); 23469 } 23470 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 23471 23472 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 23473 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 23474 static_cast<VkPipelineLayout>( layout ), 23475 firstSet, 23476 bufferIndices.size(), 23477 bufferIndices.data(), 23478 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 23479 } 23480 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23481 23482 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,Dispatch const & d) const23483 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 23484 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 23485 uint32_t set, 23486 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23487 { 23488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23489 d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( 23490 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); 23491 } 23492 23493 template <typename Dispatch> getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const23494 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( 23495 const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23496 { 23497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 23498 return static_cast<Result>( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( 23499 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 23500 } 23501 23502 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23503 template <typename DataType, typename Dispatch> 23504 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const23505 Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 23506 { 23507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23508 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23509 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT && 23510 "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 23511 # endif 23512 23513 DataType data; 23514 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23515 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 23516 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); 23517 23518 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23519 } 23520 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23521 23522 template <typename Dispatch> getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const23523 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( 23524 const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 23525 { 23526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23527 return static_cast<Result>( d.vkGetImageOpaqueCaptureDescriptorDataEXT( 23528 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 23529 } 23530 23531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23532 template <typename DataType, typename Dispatch> 23533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const23534 Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 23535 { 23536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23537 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23538 VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && 23539 "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 23540 # endif 23541 23542 DataType data; 23543 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23544 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 23545 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); 23546 23547 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23548 } 23549 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23550 23551 template <typename Dispatch> getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const23552 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( 23553 const 
VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23554 { 23555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23556 return static_cast<Result>( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( 23557 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 23558 } 23559 23560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23561 template <typename DataType, typename Dispatch> 23562 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const23563 Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 23564 { 23565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23566 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23567 VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && 23568 "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 23569 # endif 23570 23571 DataType data; 23572 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23573 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 23574 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); 23575 23576 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23577 } 23578 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23579 23580 template <typename Dispatch> getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const 
& d) const23581 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( 23582 const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23583 { 23584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23585 return static_cast<Result>( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( 23586 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 23587 } 23588 23589 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23590 template <typename DataType, typename Dispatch> 23591 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const23592 Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 23593 { 23594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23595 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23596 VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && 23597 "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 23598 # endif 23599 23600 DataType data; 23601 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23602 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 23603 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); 23604 23605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23606 } 23607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23608 23609 template <typename Dispatch> 
getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const23610 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( 23611 const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23612 { 23613 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23614 return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 23615 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 23616 } 23617 23618 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23619 template <typename DataType, typename Dispatch> 23620 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const23621 Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, 23622 Dispatch const & d ) const 23623 { 23624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23626 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && 23627 "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 23628 # endif 23629 23630 DataType data; 23631 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 23632 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 23633 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); 23634 23635 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23636 } 23637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23638 23639 //=== VK_NV_fragment_shading_rate_enums === 23640 23641 template <typename Dispatch> setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const23642 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, 23643 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 23644 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23645 { 23646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23647 d.vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 23648 static_cast<VkFragmentShadingRateNV>( shadingRate ), 23649 reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 23650 } 23651 23652 //=== VK_EXT_mesh_shader === 23653 23654 template <typename Dispatch> 23655 VULKAN_HPP_INLINE void drawMeshTasksEXT(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const23656 CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23657 { 23658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23659 d.vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); 23660 } 23661 23662 template <typename Dispatch> drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const23663 VULKAN_HPP_INLINE void 
CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 23664 VULKAN_HPP_NAMESPACE::DeviceSize offset, 23665 uint32_t drawCount, 23666 uint32_t stride, 23667 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23668 { 23669 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23670 d.vkCmdDrawMeshTasksIndirectEXT( 23671 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 23672 } 23673 23674 template <typename Dispatch> drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const23675 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 23676 VULKAN_HPP_NAMESPACE::DeviceSize offset, 23677 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 23678 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 23679 uint32_t maxDrawCount, 23680 uint32_t stride, 23681 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23682 { 23683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23684 d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 23685 static_cast<VkBuffer>( buffer ), 23686 static_cast<VkDeviceSize>( offset ), 23687 static_cast<VkBuffer>( countBuffer ), 23688 static_cast<VkDeviceSize>( countBufferOffset ), 23689 maxDrawCount, 23690 stride ); 23691 } 23692 23693 //=== VK_KHR_copy_commands2 === 23694 23695 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const23696 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 23697 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23698 { 23699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 23700 d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 23701 } 23702 23703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23704 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const23705 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 23706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23707 { 23708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23710 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23711 # endif 23712 23713 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 23714 } 23715 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23716 23717 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const23718 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, 23719 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23720 { 23721 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23722 d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 23723 } 23724 23725 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23726 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const23727 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, 23728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23729 { 23730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23731 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23732 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23733 # endif 23734 23735 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 23736 } 23737 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23738 23739 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const23740 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 23741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23742 { 23743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23744 d.vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 23745 reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 23746 } 23747 23748 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23749 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const23750 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 23751 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23752 { 23753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23755 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23756 # endif 23757 23758 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 23759 } 23760 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23761 23762 template <typename Dispatch> copyImageToBuffer2KHR(const 
VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const23763 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 23764 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23765 { 23766 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23767 d.vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 23768 reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 23769 } 23770 23771 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23772 template <typename Dispatch> copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const23773 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 23774 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23775 { 23776 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23777 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23778 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23779 # endif 23780 23781 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 23782 } 23783 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23784 23785 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const23786 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, 23787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23788 { 23789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23790 d.vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 23791 } 
23792 23793 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23794 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const23795 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, 23796 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23797 { 23798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23799 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23800 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23801 # endif 23802 23803 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 23804 } 23805 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23806 23807 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const23808 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 23809 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23810 { 23811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23812 d.vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 23813 } 23814 23815 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23816 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const23817 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 23818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23819 { 23820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23821 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23822 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires 
<VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 23823 # endif 23824 23825 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 23826 } 23827 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23828 23829 //=== VK_EXT_device_fault === 23830 23831 template <typename Dispatch> getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,Dispatch const & d) const23832 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, 23833 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, 23834 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23835 { 23836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23837 return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( 23838 static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); 23839 } 23840 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 23841 //=== VK_NV_acquire_winrt_display === 23842 23843 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 23844 template <typename Dispatch> acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const23845 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, 23846 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23847 { 23848 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23849 return static_cast<Result>( d.vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) ); 23850 } 23851 # else 23852 template <typename Dispatch> 23853 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const23854 
PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 23855 { 23856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23857 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23858 VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23859 # endif 23860 23861 VULKAN_HPP_NAMESPACE::Result result = 23862 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 23863 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); 23864 23865 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 23866 } 23867 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 23868 23869 template <typename Dispatch> getWinrtDisplayNV(uint32_t deviceRelativeId,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const23870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, 23871 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 23872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23873 { 23874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23875 return static_cast<Result>( 23876 d.vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 23877 } 23878 23879 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23880 template <typename Dispatch> 23881 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV(uint32_t deviceRelativeId,Dispatch const & d) const23882 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const 23883 { 23884 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23885 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23886 
VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23887 # endif 23888 23889 VULKAN_HPP_NAMESPACE::DisplayKHR display; 23890 VULKAN_HPP_NAMESPACE::Result result = 23891 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 23892 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); 23893 23894 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 23895 } 23896 23897 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23898 template <typename Dispatch> 23899 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique(uint32_t deviceRelativeId,Dispatch const & d) const23900 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const 23901 { 23902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23903 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23904 VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 23905 # endif 23906 23907 VULKAN_HPP_NAMESPACE::DisplayKHR display; 23908 VULKAN_HPP_NAMESPACE::Result result = 23909 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 23910 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); 23911 23912 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23913 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 23914 } 23915 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23916 # endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23917 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 23918 23919 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) 23920 //=== VK_EXT_directfb_surface === 23921 23922 template <typename Dispatch> createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const23923 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, 23924 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23925 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 23926 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23927 { 23928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23929 return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( m_instance ), 23930 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), 23931 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 23932 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 23933 } 23934 23935 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23936 template <typename Dispatch> 23937 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23938 Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 23939 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23940 Dispatch const & d ) const 23941 { 23942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23943 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23944 VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function 
<vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); 23945 # endif 23946 23947 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23948 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT( 23949 m_instance, 23950 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 23951 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23952 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23953 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); 23954 23955 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 23956 } 23957 23958 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23959 template <typename Dispatch> 23960 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23961 Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 23962 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23963 Dispatch const & d ) const 23964 { 23965 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23966 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23967 VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); 23968 # endif 23969 23970 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23971 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT( 23972 m_instance, 23973 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 23974 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23975 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23976 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); 23977 23978 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23979 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 23980 } 23981 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23982 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23983 23984 template <typename Dispatch> getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB * dfb,Dispatch const & d) const23985 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, 23986 IDirectFB * dfb, 23987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23988 { 23989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23990 return static_cast<Bool32>( 23991 d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dfb ) ); 23992 } 23993 23994 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23995 template <typename Dispatch> 23996 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB & dfb,Dispatch const & d) const23997 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23998 { 23999 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24000 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24001 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT && 24002 "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" ); 24003 # endif 24004 24005 VkBool32 result = 
d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); 24006 24007 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 24008 } 24009 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24010 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ 24011 24012 //=== VK_EXT_vertex_input_dynamic_state === 24013 24014 template <typename Dispatch> setVertexInputEXT(uint32_t vertexBindingDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,uint32_t vertexAttributeDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,Dispatch const & d) const24015 VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, 24016 const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, 24017 uint32_t vertexAttributeDescriptionCount, 24018 const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, 24019 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24020 { 24021 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24022 d.vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 24023 vertexBindingDescriptionCount, 24024 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), 24025 vertexAttributeDescriptionCount, 24026 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) ); 24027 } 24028 24029 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24030 template <typename Dispatch> setVertexInputEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,Dispatch const & d) const24031 VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( 24032 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, 24033 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, 24034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24035 { 24036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24037 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24038 VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" ); 24039 # endif 24040 24041 d.vkCmdSetVertexInputEXT( m_commandBuffer, 24042 vertexBindingDescriptions.size(), 24043 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), 24044 vertexAttributeDescriptions.size(), 24045 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) ); 24046 } 24047 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24048 24049 #if defined( VK_USE_PLATFORM_FUCHSIA ) 24050 //=== VK_FUCHSIA_external_memory === 24051 24052 template <typename Dispatch> 24053 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const24054 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 24055 zx_handle_t * pZirconHandle, 24056 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24057 { 24058 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24059 return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA( 24060 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 24061 } 24062 24063 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24064 template 
<typename Dispatch> 24065 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const24066 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const 24067 { 24068 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24069 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24070 VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); 24071 # endif 24072 24073 zx_handle_t zirconHandle; 24074 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24075 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 24076 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); 24077 24078 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); 24079 } 24080 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24081 24082 template <typename Dispatch> 24083 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,Dispatch const & d) const24084 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 24085 zx_handle_t zirconHandle, 24086 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, 24087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24088 { 24089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
24090 return static_cast<Result>( 24091 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ), 24092 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 24093 zirconHandle, 24094 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) ); 24095 } 24096 24097 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24098 template <typename Dispatch> 24099 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,Dispatch const & d) const24100 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 24101 zx_handle_t zirconHandle, 24102 Dispatch const & d ) const 24103 { 24104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24105 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24106 VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA && 24107 "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); 24108 # endif 24109 24110 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; 24111 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24112 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, 24113 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 24114 zirconHandle, 24115 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) ); 24116 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); 24117 24118 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); 24119 } 24120 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24121 #endif 
/*VK_USE_PLATFORM_FUCHSIA*/ 24122 24123 #if defined( VK_USE_PLATFORM_FUCHSIA ) 24124 //=== VK_FUCHSIA_external_semaphore === 24125 24126 template <typename Dispatch> importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,Dispatch const & d) const24127 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( 24128 const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24129 { 24130 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24131 return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 24132 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); 24133 } 24134 24135 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24136 template <typename Dispatch> 24137 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,Dispatch const & d) const24138 Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, 24139 Dispatch const & d ) const 24140 { 24141 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24142 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24143 VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" ); 24144 # endif 24145 24146 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 24147 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) ); 24148 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); 24149 24150 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 24151 } 24152 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24153 24154 template <typename Dispatch> 24155 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const24156 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 24157 zx_handle_t * pZirconHandle, 24158 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24159 { 24160 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24161 return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA( 24162 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 24163 } 24164 24165 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24166 template <typename Dispatch> 24167 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const24168 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const 24169 { 24170 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24171 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24172 VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" ); 24173 # endif 24174 24175 zx_handle_t zirconHandle; 24176 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24177 
d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 24178 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); 24179 24180 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); 24181 } 24182 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24183 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 24184 24185 #if defined( VK_USE_PLATFORM_FUCHSIA ) 24186 //=== VK_FUCHSIA_buffer_collection === 24187 24188 template <typename Dispatch> 24189 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,Dispatch const & d) const24190 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, 24191 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24192 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, 24193 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24194 { 24195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24196 return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ), 24197 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), 24198 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 24199 reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) ); 24200 } 24201 24202 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24203 template <typename Dispatch> 24204 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24205 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 24206 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24207 Dispatch const & d ) const 24208 { 24209 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24210 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24211 VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24212 # endif 24213 24214 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 24215 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 24216 m_device, 24217 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 24218 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24219 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 24220 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); 24221 24222 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) ); 24223 } 24224 24225 # ifndef VULKAN_HPP_NO_SMART_HANDLE 24226 template <typename Dispatch> 24227 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type createBufferCollectionFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24228 Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 24229 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24230 Dispatch 
const & d ) const 24231 { 24232 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24233 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24234 VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24235 # endif 24236 24237 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 24238 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 24239 m_device, 24240 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 24241 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24242 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 24243 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); 24244 24245 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 24246 result, 24247 UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 24248 } 24249 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 24250 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24251 24252 template <typename Dispatch> 24253 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,Dispatch const & d) const24254 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24255 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, 24256 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24257 { 24258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24259 return static_cast<Result>( 24260 
d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), 24261 static_cast<VkBufferCollectionFUCHSIA>( collection ), 24262 reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) ); 24263 } 24264 24265 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24266 template <typename Dispatch> 24267 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,Dispatch const & d) const24268 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24269 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, 24270 Dispatch const & d ) const 24271 { 24272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24273 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24274 VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA && 24275 "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24276 # endif 24277 24278 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( 24279 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) ); 24280 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); 24281 24282 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 24283 } 24284 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24285 24286 template <typename Dispatch> 24287 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const 
VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,Dispatch const & d) const24288 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24289 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, 24290 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24291 { 24292 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24293 return static_cast<Result>( 24294 d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), 24295 static_cast<VkBufferCollectionFUCHSIA>( collection ), 24296 reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); 24297 } 24298 24299 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24300 template <typename Dispatch> 24301 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,Dispatch const & d) const24302 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24303 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, 24304 Dispatch const & d ) const 24305 { 24306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24307 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24308 VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA && 24309 "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24310 # endif 24311 24312 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( 24313 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) ); 24314 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); 24315 24316 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 24317 } 24318 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24319 24320 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24321 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24322 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24323 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24324 { 24325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24326 d.vkDestroyBufferCollectionFUCHSIA( 24327 static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24328 } 24329 24330 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24331 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24332 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24333 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24334 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24335 { 24336 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24337 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24338 VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24339 # endif 24340 24341 d.vkDestroyBufferCollectionFUCHSIA( 24342 m_device, 24343 static_cast<VkBufferCollectionFUCHSIA>( collection ), 24344 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24345 } 24346 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24347 24348 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24349 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24350 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24351 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24352 { 24353 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24354 d.vkDestroyBufferCollectionFUCHSIA( 24355 static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24356 } 24357 24358 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24359 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24360 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24361 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24362 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24363 { 24364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24365 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24366 VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24367 # endif 24368 24369 d.vkDestroyBufferCollectionFUCHSIA( 24370 m_device, 24371 static_cast<VkBufferCollectionFUCHSIA>( collection ), 24372 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24373 } 24374 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
24375 24376 template <typename Dispatch> 24377 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,Dispatch const & d) const24378 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 24379 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, 24380 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24381 { 24382 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24383 return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ), 24384 static_cast<VkBufferCollectionFUCHSIA>( collection ), 24385 reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); 24386 } 24387 24388 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24389 template <typename Dispatch> 24390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Dispatch const & d) const24391 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const 24392 { 24393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24394 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24395 VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA && 24396 "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 24397 # endif 24398 24399 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; 24400 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( 24401 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( 
&properties ) ) ); 24402 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); 24403 24404 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 24405 } 24406 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24407 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 24408 24409 //=== VK_HUAWEI_subpass_shading === 24410 24411 template <typename Dispatch> getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass,VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,Dispatch const & d) const24412 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, 24413 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, 24414 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24415 { 24416 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24417 return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 24418 static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); 24419 } 24420 24421 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24422 template <typename Dispatch> 24423 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass,Dispatch const & d) const24424 Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const 24425 { 24426 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24427 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24428 VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && 24429 "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" ); 24430 # endif 24431 24432 VULKAN_HPP_NAMESPACE::Extent2D 
maxWorkgroupSize; 24433 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 24434 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) ); 24435 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); 24436 24437 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); 24438 } 24439 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24440 24441 template <typename Dispatch> subpassShadingHUAWEI(Dispatch const & d) const24442 VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24443 { 24444 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24445 d.vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) ); 24446 } 24447 24448 //=== VK_HUAWEI_invocation_mask === 24449 24450 template <typename Dispatch> bindInvocationMaskHUAWEI(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const24451 VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, 24452 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 24453 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24454 { 24455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24456 d.vkCmdBindInvocationMaskHUAWEI( 24457 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 24458 } 24459 24460 //=== VK_NV_external_memory_rdma === 24461 24462 template <typename Dispatch> 24463 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,Dispatch const & d) 
const24464 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, 24465 VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, 24466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24467 { 24468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24469 return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ), 24470 reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), 24471 reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); 24472 } 24473 24474 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24475 template <typename Dispatch> 24476 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,Dispatch const & d) const24477 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const 24478 { 24479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24480 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24481 VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" ); 24482 # endif 24483 24484 VULKAN_HPP_NAMESPACE::RemoteAddressNV address; 24485 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryRemoteAddressNV( 24486 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) ); 24487 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); 24488 24489 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) ); 24490 } 24491 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24492 
24493 //=== VK_EXT_pipeline_properties === 24494 24495 template <typename Dispatch> getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,Dispatch const & d) const24496 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, 24497 VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, 24498 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24499 { 24500 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24501 return static_cast<Result>( d.vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ), 24502 reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), 24503 reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); 24504 } 24505 24506 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24507 template <typename Dispatch> 24508 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo,Dispatch const & d) const24509 Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const 24510 { 24511 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24512 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24513 VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" ); 24514 # endif 24515 24516 VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; 24517 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelinePropertiesEXT( 24518 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) ); 24519 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getPipelinePropertiesEXT" ); 24520 24521 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) ); 24522 } 24523 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24524 24525 //=== VK_EXT_extended_dynamic_state2 === 24526 24527 template <typename Dispatch> setPatchControlPointsEXT(uint32_t patchControlPoints,Dispatch const & d) const24528 VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24529 { 24530 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24531 d.vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints ); 24532 } 24533 24534 template <typename Dispatch> setRasterizerDiscardEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const24535 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 24536 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24537 { 24538 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24539 d.vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); 24540 } 24541 24542 template <typename Dispatch> setDepthBiasEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const24543 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24544 { 24545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24546 d.vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); 24547 } 24548 24549 template <typename Dispatch> setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp,Dispatch const & d) const24550 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( 
VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24551 { 24552 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24553 d.vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) ); 24554 } 24555 24556 template <typename Dispatch> setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const24557 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 24558 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24559 { 24560 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24561 d.vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); 24562 } 24563 24564 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 24565 //=== VK_QNX_screen_surface === 24566 24567 template <typename Dispatch> createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const24568 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, 24569 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24570 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 24571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24572 { 24573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24574 return static_cast<Result>( d.vkCreateScreenSurfaceQNX( static_cast<VkInstance>( m_instance ), 24575 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), 24576 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 24577 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 24578 } 24579 24580 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24581 template <typename 
Dispatch> 24582 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24583 Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 24584 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24585 Dispatch const & d ) const 24586 { 24587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24588 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24589 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 24590 # endif 24591 24592 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 24593 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 24594 m_instance, 24595 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 24596 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24597 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 24598 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); 24599 24600 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 24601 } 24602 24603 # ifndef VULKAN_HPP_NO_SMART_HANDLE 24604 template <typename Dispatch> 24605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createScreenSurfaceQNXUnique(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24606 Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & 
createInfo, 24607 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24608 Dispatch const & d ) const 24609 { 24610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24611 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24612 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 24613 # endif 24614 24615 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 24616 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 24617 m_instance, 24618 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 24619 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24620 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 24621 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); 24622 24623 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 24624 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 24625 } 24626 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 24627 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24628 24629 template <typename Dispatch> getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window * window,Dispatch const & d) const24630 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, 24631 struct _screen_window * window, 24632 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24633 { 24634 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24635 return static_cast<Bool32>( 24636 d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, window ) ); 24637 } 24638 24639 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24640 template 
<typename Dispatch> 24641 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window & window,Dispatch const & d) const24642 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24643 { 24644 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24645 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24646 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && 24647 "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" ); 24648 # endif 24649 24650 VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); 24651 24652 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 24653 } 24654 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24655 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 24656 24657 //=== VK_EXT_color_write_enable === 24658 24659 template <typename Dispatch> setColorWriteEnableEXT(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,Dispatch const & d) const24660 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, 24661 const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, 24662 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24663 { 24664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24665 d.vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); 24666 } 24667 24668 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24669 template <typename Dispatch> setColorWriteEnableEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,Dispatch const & d) const24670 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, 24671 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24672 { 24673 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24674 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24675 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" ); 24676 # endif 24677 24678 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) ); 24679 } 24680 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24681 24682 //=== VK_KHR_ray_tracing_maintenance1 === 24683 24684 template <typename Dispatch> traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const24685 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 24686 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24687 { 24688 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24689 d.vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 24690 } 24691 24692 //=== VK_EXT_multi_draw === 24693 24694 template <typename Dispatch> drawMultiEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,Dispatch const & d) const24695 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, 24696 const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, 24697 uint32_t instanceCount, 24698 uint32_t firstInstance, 24699 uint32_t stride, 24700 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24701 { 24702 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24703 d.vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 24704 drawCount, 24705 reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), 
24706 instanceCount, 24707 firstInstance, 24708 stride ); 24709 } 24710 24711 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24712 template <typename Dispatch> drawMultiEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,uint32_t instanceCount,uint32_t firstInstance,Dispatch const & d) const24713 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, 24714 uint32_t instanceCount, 24715 uint32_t firstInstance, 24716 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24717 { 24718 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24719 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24720 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" ); 24721 # endif 24722 24723 d.vkCmdDrawMultiEXT( m_commandBuffer, 24724 vertexInfo.size(), 24725 reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), 24726 instanceCount, 24727 firstInstance, 24728 vertexInfo.stride() ); 24729 } 24730 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24731 24732 template <typename Dispatch> drawMultiIndexedEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,const int32_t * pVertexOffset,Dispatch const & d) const24733 VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, 24734 const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, 24735 uint32_t instanceCount, 24736 uint32_t firstInstance, 24737 uint32_t stride, 24738 const int32_t * pVertexOffset, 24739 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24740 { 24741 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24742 d.vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 24743 drawCount, 24744 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( 
pIndexInfo ), 24745 instanceCount, 24746 firstInstance, 24747 stride, 24748 pVertexOffset ); 24749 } 24750 24751 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24752 template <typename Dispatch> 24753 VULKAN_HPP_INLINE void drawMultiIndexedEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,uint32_t instanceCount,uint32_t firstInstance,Optional<const int32_t> vertexOffset,Dispatch const & d) const24754 CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, 24755 uint32_t instanceCount, 24756 uint32_t firstInstance, 24757 Optional<const int32_t> vertexOffset, 24758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24759 { 24760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24761 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24762 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" ); 24763 # endif 24764 24765 d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, 24766 indexInfo.size(), 24767 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), 24768 instanceCount, 24769 firstInstance, 24770 indexInfo.stride(), 24771 static_cast<const int32_t *>( vertexOffset ) ); 24772 } 24773 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24774 24775 //=== VK_EXT_opacity_micromap === 24776 24777 template <typename Dispatch> createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,Dispatch const & d) const24778 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, 24779 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24780 VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, 24781 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 24782 { 24783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24784 return static_cast<Result>( d.vkCreateMicromapEXT( static_cast<VkDevice>( m_device ), 24785 reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), 24786 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 24787 reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) ); 24788 } 24789 24790 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24791 template <typename Dispatch> 24792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24793 Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 24794 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24795 Dispatch const & d ) const 24796 { 24797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24798 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24799 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24800 # endif 24801 24802 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 24803 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24804 d.vkCreateMicromapEXT( m_device, 24805 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 24806 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24807 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 24808 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); 24809 24810 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); 24811 } 24812 24813 # ifndef VULKAN_HPP_NO_SMART_HANDLE 24814 template <typename 
Dispatch> 24815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24816 Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 24817 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24818 Dispatch const & d ) const 24819 { 24820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24821 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24822 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24823 # endif 24824 24825 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 24826 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24827 d.vkCreateMicromapEXT( m_device, 24828 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 24829 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 24830 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 24831 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); 24832 24833 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 24834 result, UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 24835 } 24836 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 24837 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24838 24839 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24840 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24841 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24843 { 24844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24845 d.vkDestroyMicromapEXT( 24846 static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24847 } 24848 24849 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24850 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24851 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24852 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24853 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24854 { 24855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24856 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24857 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24858 # endif 24859 24860 d.vkDestroyMicromapEXT( m_device, 24861 static_cast<VkMicromapEXT>( micromap ), 24862 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24863 } 24864 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24865 24866 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const24867 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24868 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 24869 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24870 { 24871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24872 d.vkDestroyMicromapEXT( 24873 static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 24874 } 24875 24876 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24877 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const24878 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 24879 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 24880 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24881 { 24882 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24883 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24884 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24885 # endif 24886 24887 d.vkDestroyMicromapEXT( m_device, 24888 static_cast<VkMicromapEXT>( micromap ), 24889 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 24890 } 24891 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24892 24893 template <typename Dispatch> buildMicromapsEXT(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const24894 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, 24895 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 24896 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24897 { 24898 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24899 d.vkCmdBuildMicromapsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); 24900 } 24901 24902 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24903 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const24904 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 24905 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24906 { 24907 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24908 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24909 VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 24910 # endif 24911 24912 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ); 24913 } 24914 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24915 24916 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const24917 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24918 uint32_t infoCount, 24919 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 24920 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24921 { 24922 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24923 return static_cast<Result>( d.vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ), 24924 static_cast<VkDeferredOperationKHR>( deferredOperation ), 24925 infoCount, 24926 reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); 24927 } 24928 24929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24930 template <typename Dispatch> 24931 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const24932 Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24933 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 24934 Dispatch const & d ) const 24935 { 24936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24937 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24938 VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 24939 # endif 24940 24941 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBuildMicromapsEXT( 24942 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) ); 24943 VULKAN_HPP_NAMESPACE::detail::resultCheck( 24944 result, 24945 VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", 24946 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 24947 24948 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 24949 } 24950 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24951 24952 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const24953 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24954 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, 24955 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24956 { 24957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24958 return static_cast<Result>( d.vkCopyMicromapEXT( 24959 static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); 24960 } 24961 24962 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24963 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const 
VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const24964 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24965 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, 24966 Dispatch const & d ) const 24967 { 24968 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24969 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24970 VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 24971 # endif 24972 24973 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 24974 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) ); 24975 VULKAN_HPP_NAMESPACE::detail::resultCheck( 24976 result, 24977 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", 24978 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 24979 24980 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 24981 } 24982 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24983 24984 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const24985 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 24986 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 24987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24988 { 24989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24990 return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ), 24991 static_cast<VkDeferredOperationKHR>( deferredOperation ), 24992 
reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); 24993 } 24994 24995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24996 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const24997 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( 24998 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const 24999 { 25000 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25001 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25002 VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); 25003 # endif 25004 25005 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMicromapToMemoryEXT( 25006 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) ); 25007 VULKAN_HPP_NAMESPACE::detail::resultCheck( 25008 result, 25009 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", 25010 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 25011 25012 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 25013 } 25014 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25015 25016 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const25017 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 25018 const 
VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 25019 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25020 { 25021 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25022 return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ), 25023 static_cast<VkDeferredOperationKHR>( deferredOperation ), 25024 reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); 25025 } 25026 25027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25028 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) const25029 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( 25030 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const 25031 { 25032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25033 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25034 VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 25035 # endif 25036 25037 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToMicromapEXT( 25038 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) ); 25039 VULKAN_HPP_NAMESPACE::detail::resultCheck( 25040 result, 25041 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", 25042 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 25043 25044 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 25045 } 25046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25047 25048 template <typename Dispatch> 
writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const25049 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, 25050 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 25051 VULKAN_HPP_NAMESPACE::QueryType queryType, 25052 size_t dataSize, 25053 void * pData, 25054 size_t stride, 25055 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25056 { 25057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25058 return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ), 25059 micromapCount, 25060 reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), 25061 static_cast<VkQueryType>( queryType ), 25062 dataSize, 25063 pData, 25064 stride ) ); 25065 } 25066 25067 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25068 template <typename DataType, 25069 typename DataTypeAllocator, 25070 typename Dispatch, 25071 typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type> 25072 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const25073 Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 25074 VULKAN_HPP_NAMESPACE::QueryType queryType, 25075 size_t dataSize, 25076 size_t stride, 25077 Dispatch const & d ) const 25078 { 25079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25080 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25081 VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function 
<vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 25082 # endif 25083 25084 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 25085 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 25086 VULKAN_HPP_NAMESPACE::Result result = 25087 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 25088 micromaps.size(), 25089 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 25090 static_cast<VkQueryType>( queryType ), 25091 data.size() * sizeof( DataType ), 25092 reinterpret_cast<void *>( data.data() ), 25093 stride ) ); 25094 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); 25095 25096 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 25097 } 25098 25099 template <typename DataType, typename Dispatch> 25100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type writeMicromapsPropertyEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const25101 Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 25102 VULKAN_HPP_NAMESPACE::QueryType queryType, 25103 size_t stride, 25104 Dispatch const & d ) const 25105 { 25106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25107 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25108 VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 25109 # endif 25110 25111 DataType data; 25112 VULKAN_HPP_NAMESPACE::Result result = 25113 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 25114 micromaps.size(), 25115 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 25116 
static_cast<VkQueryType>( queryType ), 25117 sizeof( DataType ), 25118 reinterpret_cast<void *>( &data ), 25119 stride ) ); 25120 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); 25121 25122 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 25123 } 25124 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25125 25126 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const25127 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25128 { 25129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25130 d.vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); 25131 } 25132 25133 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25134 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const25135 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25136 { 25137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25139 VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 25140 # endif 25141 25142 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ); 25143 } 25144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25145 25146 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const25147 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const 
VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 25148 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25149 { 25150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25151 d.vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); 25152 } 25153 25154 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25155 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const25156 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, 25157 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25158 { 25159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25161 VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); 25162 # endif 25163 25164 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ); 25165 } 25166 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25167 25168 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const25169 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 25170 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25171 { 25172 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25173 d.vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); 25174 } 25175 25176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25177 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) 
const25178 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, 25179 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25180 { 25181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25182 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25183 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 25184 # endif 25185 25186 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ); 25187 } 25188 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25189 25190 template <typename Dispatch> writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const25191 VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, 25192 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 25193 VULKAN_HPP_NAMESPACE::QueryType queryType, 25194 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 25195 uint32_t firstQuery, 25196 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25197 { 25198 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25199 d.vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 25200 micromapCount, 25201 reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), 25202 static_cast<VkQueryType>( queryType ), 25203 static_cast<VkQueryPool>( queryPool ), 25204 firstQuery ); 25205 } 25206 25207 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25208 template <typename Dispatch> 25209 VULKAN_HPP_INLINE void writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch 
const & d) const25210 CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 25211 VULKAN_HPP_NAMESPACE::QueryType queryType, 25212 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 25213 uint32_t firstQuery, 25214 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25215 { 25216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25218 VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 25219 # endif 25220 25221 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, 25222 micromaps.size(), 25223 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 25224 static_cast<VkQueryType>( queryType ), 25225 static_cast<VkQueryPool>( queryPool ), 25226 firstQuery ); 25227 } 25228 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25229 25230 template <typename Dispatch> getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const25231 VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, 25232 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 25233 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25234 { 25235 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25236 d.vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ), 25237 reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), 25238 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); 25239 } 25240 25241 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25242 template <typename Dispatch> 25243 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR 
getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo,Dispatch const & d) const25244 Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25245 { 25246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25247 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25248 VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" ); 25249 # endif 25250 25251 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 25252 d.vkGetDeviceMicromapCompatibilityEXT( m_device, 25253 reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), 25254 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 25255 25256 return compatibility; 25257 } 25258 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25259 25260 template <typename Dispatch> getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,Dispatch const & d) const25261 VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 25262 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, 25263 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, 25264 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25265 { 25266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25267 d.vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ), 25268 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 25269 reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), 25270 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) ); 25271 } 25272 25273 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25274 
template <typename Dispatch> 25275 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,Dispatch const & d) const25276 Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 25277 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, 25278 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25279 { 25280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25282 VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" ); 25283 # endif 25284 25285 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; 25286 d.vkGetMicromapBuildSizesEXT( m_device, 25287 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 25288 reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), 25289 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) ); 25290 25291 return sizeInfo; 25292 } 25293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25294 25295 //=== VK_HUAWEI_cluster_culling_shader === 25296 25297 template <typename Dispatch> 25298 VULKAN_HPP_INLINE void drawClusterHUAWEI(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const25299 CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25300 { 25301 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25302 d.vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); 25303 } 25304 25305 template <typename Dispatch> drawClusterIndirectHUAWEI(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const25306 
VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, 25307 VULKAN_HPP_NAMESPACE::DeviceSize offset, 25308 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25309 { 25310 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25311 d.vkCmdDrawClusterIndirectHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 25312 } 25313 25314 //=== VK_EXT_pageable_device_local_memory === 25315 25316 template <typename Dispatch> setMemoryPriorityEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory,float priority,Dispatch const & d) const25317 VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25318 { 25319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25320 d.vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), priority ); 25321 } 25322 25323 //=== VK_KHR_maintenance4 === 25324 25325 template <typename Dispatch> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const25326 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 25327 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 25328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25329 { 25330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25331 d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), 25332 reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), 25333 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 25334 } 25335 25336 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25337 template <typename Dispatch> 25338 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const25339 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25340 { 25341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25342 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25343 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && 25344 "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25345 # endif 25346 25347 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 25348 d.vkGetDeviceBufferMemoryRequirementsKHR( 25349 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25350 25351 return memoryRequirements; 25352 } 25353 25354 template <typename X, typename Y, typename... 
Z, typename Dispatch> 25355 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const25356 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25357 { 25358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25359 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25360 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && 25361 "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25362 # endif 25363 25364 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 25365 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 25366 d.vkGetDeviceBufferMemoryRequirementsKHR( 25367 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25368 25369 return structureChain; 25370 } 25371 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25372 25373 template <typename Dispatch> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const25374 VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 25375 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 25376 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25377 { 25378 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25379 d.vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), 25380 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 25381 
reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 25382 } 25383 25384 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25385 template <typename Dispatch> 25386 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const25387 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25388 { 25389 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25390 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25391 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && 25392 "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25393 # endif 25394 25395 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 25396 d.vkGetDeviceImageMemoryRequirementsKHR( 25397 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25398 25399 return memoryRequirements; 25400 } 25401 25402 template <typename X, typename Y, typename... 
Z, typename Dispatch> 25403 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const25404 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25405 { 25406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25407 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25408 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && 25409 "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25410 # endif 25411 25412 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 25413 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 25414 d.vkGetDeviceImageMemoryRequirementsKHR( 25415 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25416 25417 return structureChain; 25418 } 25419 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25420 25421 template <typename Dispatch> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const25422 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 25423 uint32_t * pSparseMemoryRequirementCount, 25424 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 25425 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25426 { 25427 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25428 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 
static_cast<VkDevice>( m_device ), 25429 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 25430 pSparseMemoryRequirementCount, 25431 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 25432 } 25433 25434 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25435 template <typename SparseImageMemoryRequirements2Allocator, 25436 typename Dispatch, 25437 typename std::enable_if< 25438 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 25439 int>::type> 25440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const25441 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 25442 { 25443 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25444 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25445 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && 25446 "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25447 # endif 25448 25449 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 25450 uint32_t sparseMemoryRequirementCount; 25451 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 25452 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 25453 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 25454 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 25455 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 25456 &sparseMemoryRequirementCount, 25457 
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 25458 25459 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 25460 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 25461 { 25462 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 25463 } 25464 return sparseMemoryRequirements; 25465 } 25466 25467 template <typename SparseImageMemoryRequirements2Allocator, 25468 typename Dispatch, 25469 typename std::enable_if< 25470 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 25471 int>::type> 25472 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const25473 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 25474 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 25475 Dispatch const & d ) const 25476 { 25477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25478 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25479 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && 25480 "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 25481 # endif 25482 25483 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 25484 sparseImageMemoryRequirements2Allocator ); 25485 uint32_t sparseMemoryRequirementCount; 25486 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 25487 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info 
), &sparseMemoryRequirementCount, nullptr ); 25488 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 25489 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 25490 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 25491 &sparseMemoryRequirementCount, 25492 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 25493 25494 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 25495 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 25496 { 25497 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 25498 } 25499 return sparseMemoryRequirements; 25500 } 25501 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25502 25503 //=== VK_VALVE_descriptor_set_host_mapping === 25504 25505 template <typename Dispatch> getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,Dispatch const & d) const25506 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, 25507 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, 25508 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25509 { 25510 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25511 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ), 25512 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), 25513 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) ); 25514 } 25515 25516 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25517 template <typename Dispatch> 25518 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(const 
VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,Dispatch const & d) const25519 Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, 25520 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25521 { 25522 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25523 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25524 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE && 25525 "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" ); 25526 # endif 25527 25528 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; 25529 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, 25530 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), 25531 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) ); 25532 25533 return hostMapping; 25534 } 25535 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25536 25537 template <typename Dispatch> 25538 VULKAN_HPP_INLINE void getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,void ** ppData,Dispatch const & d) const25539 Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25540 { 25541 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25542 d.vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( descriptorSet ), ppData ); 25543 } 25544 25545 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25546 template <typename Dispatch> getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,Dispatch const & d) const25547 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 25548 Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT 25549 { 25550 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25551 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25552 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE && 25553 "Function <vkGetDescriptorSetHostMappingVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" ); 25554 # endif 25555 25556 void * pData; 25557 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData ); 25558 25559 return pData; 25560 } 25561 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25562 25563 //=== VK_NV_copy_memory_indirect === 25564 25565 template <typename Dispatch> copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,Dispatch const & d) const25566 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 25567 uint32_t copyCount, 25568 uint32_t stride, 25569 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25570 { 25571 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25572 d.vkCmdCopyMemoryIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); 25573 } 25574 25575 template <typename Dispatch> copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,Dispatch const & d) const25576 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 25577 uint32_t copyCount, 25578 uint32_t stride, 25579 VULKAN_HPP_NAMESPACE::Image dstImage, 25580 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 25581 const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, 25582 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 25583 { 25584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25585 d.vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 25586 static_cast<VkDeviceAddress>( copyBufferAddress ), 25587 copyCount, 25588 stride, 25589 static_cast<VkImage>( dstImage ), 25590 static_cast<VkImageLayout>( dstImageLayout ), 25591 reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) ); 25592 } 25593 25594 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25595 template <typename Dispatch> 25596 VULKAN_HPP_INLINE void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,Dispatch const & d) const25597 CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 25598 uint32_t stride, 25599 VULKAN_HPP_NAMESPACE::Image dstImage, 25600 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 25601 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, 25602 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25603 { 25604 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25605 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25606 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" ); 25607 # endif 25608 25609 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 25610 static_cast<VkDeviceAddress>( copyBufferAddress ), 25611 imageSubresources.size(), 25612 stride, 25613 static_cast<VkImage>( dstImage ), 25614 static_cast<VkImageLayout>( dstImageLayout ), 25615 reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) ); 25616 } 25617 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25618 25619 //=== VK_NV_memory_decompression === 25620 25621 template <typename Dispatch> decompressMemoryNV(uint32_t decompressRegionCount,const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,Dispatch const & d) const25622 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, 25623 const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, 25624 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25625 { 25626 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25627 d.vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 25628 decompressRegionCount, 25629 reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); 25630 } 25631 25632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25633 template <typename Dispatch> 25634 VULKAN_HPP_INLINE void decompressMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,Dispatch const & d) const25635 CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, 25636 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25637 { 25638 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25639 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25640 VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" ); 25641 # endif 25642 25643 d.vkCmdDecompressMemoryNV( 25644 m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) ); 25645 } 25646 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25647 25648 template <typename Dispatch> decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,VULKAN_HPP_NAMESPACE::DeviceAddress 
indirectCommandsCountAddress,uint32_t stride,Dispatch const & d) const25649 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, 25650 VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, 25651 uint32_t stride, 25652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25653 { 25654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25655 d.vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 25656 static_cast<VkDeviceAddress>( indirectCommandsAddress ), 25657 static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), 25658 stride ); 25659 } 25660 25661 //=== VK_NV_device_generated_commands_compute === 25662 25663 template <typename Dispatch> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const25664 VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, 25665 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 25666 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25667 { 25668 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25669 d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ), 25670 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), 25671 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 25672 } 25673 25674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25675 template <typename Dispatch> 25676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const25677 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & 
createInfo, 25678 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25679 { 25680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25681 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25682 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 25683 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 25684 # endif 25685 25686 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 25687 d.vkGetPipelineIndirectMemoryRequirementsNV( 25688 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25689 25690 return memoryRequirements; 25691 } 25692 25693 template <typename X, typename Y, typename... Z, typename Dispatch> 25694 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const25695 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 25696 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25697 { 25698 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25699 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25700 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 25701 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 25702 # endif 25703 25704 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 25705 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 25706 d.vkGetPipelineIndirectMemoryRequirementsNV( 25707 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 25708 25709 return 
structureChain; 25710 } 25711 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25712 25713 template <typename Dispatch> updatePipelineIndirectBufferNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const25714 VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 25715 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 25716 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25717 { 25718 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25719 d.vkCmdUpdatePipelineIndirectBufferNV( 25720 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 25721 } 25722 25723 template <typename Dispatch> getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,Dispatch const & d) const25724 VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, 25725 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25726 { 25727 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25728 return static_cast<DeviceAddress>( 25729 d.vkGetPipelineIndirectDeviceAddressNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); 25730 } 25731 25732 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25733 template <typename Dispatch> 25734 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info,Dispatch const & d) const25735 Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25736 { 25737 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25738 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25739 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && 25740 "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" ); 25741 # endif 25742 25743 VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) ); 25744 25745 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 25746 } 25747 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25748 25749 //=== VK_EXT_extended_dynamic_state3 === 25750 25751 template <typename Dispatch> setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,Dispatch const & d) const25752 VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25753 { 25754 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25755 d.vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) ); 25756 } 25757 25758 template <typename Dispatch> setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,Dispatch const & d) const25759 VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25760 { 25761 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25762 d.vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) ); 25763 } 25764 25765 template <typename Dispatch> setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,Dispatch const & d) const25766 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, 25767 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25768 { 25769 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25770 d.vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); 25771 } 25772 25773 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,Dispatch const & d) const25774 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 25775 const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, 25776 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25777 { 25778 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25779 d.vkCmdSetSampleMaskEXT( 25780 static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); 25781 } 25782 25783 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25784 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,Dispatch const & d) const25785 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 25786 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, 25787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 25788 { 25789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25791 VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25792 # endif 25793 # ifdef VULKAN_HPP_NO_EXCEPTIONS 25794 VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 ); 25795 # else 25796 if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 ) 25797 { 
25798 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" ); 25799 } 25800 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 25801 25802 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) ); 25803 } 25804 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25805 25806 template <typename Dispatch> setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,Dispatch const & d) const25807 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, 25808 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25809 { 25810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25811 d.vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) ); 25812 } 25813 25814 template <typename Dispatch> setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,Dispatch const & d) const25815 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25816 { 25817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25818 d.vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) ); 25819 } 25820 25821 template <typename Dispatch> setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,Dispatch const & d) const25822 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25823 { 25824 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25825 d.vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) ); 25826 
} 25827 25828 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,Dispatch const & d) const25829 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 25830 uint32_t attachmentCount, 25831 const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, 25832 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25833 { 25834 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25835 d.vkCmdSetColorBlendEnableEXT( 25836 static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); 25837 } 25838 25839 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25840 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,Dispatch const & d) const25841 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 25842 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, 25843 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25844 { 25845 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25846 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25847 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && 25848 "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25849 # endif 25850 25851 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) ); 25852 } 25853 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25854 25855 template <typename Dispatch> setColorBlendEquationEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,Dispatch const & d) const25856 
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 25857 uint32_t attachmentCount, 25858 const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, 25859 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25860 { 25861 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25862 d.vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 25863 firstAttachment, 25864 attachmentCount, 25865 reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); 25866 } 25867 25868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25869 template <typename Dispatch> 25870 VULKAN_HPP_INLINE void setColorBlendEquationEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,Dispatch const & d) const25871 CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 25872 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, 25873 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25874 { 25875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25876 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25877 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && 25878 "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25879 # endif 25880 25881 d.vkCmdSetColorBlendEquationEXT( 25882 m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) ); 25883 } 25884 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25885 25886 template <typename Dispatch> setColorWriteMaskEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,Dispatch const & d) const25887 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 25888 
uint32_t attachmentCount, 25889 const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, 25890 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25891 { 25892 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25893 d.vkCmdSetColorWriteMaskEXT( 25894 static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); 25895 } 25896 25897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25898 template <typename Dispatch> 25899 VULKAN_HPP_INLINE void setColorWriteMaskEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,Dispatch const & d) const25900 CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 25901 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks, 25902 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25903 { 25904 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25905 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25906 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && 25907 "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25908 # endif 25909 25910 d.vkCmdSetColorWriteMaskEXT( 25911 m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) ); 25912 } 25913 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25914 25915 template <typename Dispatch> setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,Dispatch const & d) const25916 VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, 25917 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25918 { 25919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25920 d.vkCmdSetTessellationDomainOriginEXT( 
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); 25921 } 25922 25923 template <typename Dispatch> setRasterizationStreamEXT(uint32_t rasterizationStream,Dispatch const & d) const25924 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25925 { 25926 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25927 d.vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream ); 25928 } 25929 25930 template <typename Dispatch> 25931 VULKAN_HPP_INLINE void setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,Dispatch const & d) const25932 CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, 25933 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25934 { 25935 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25936 d.vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 25937 static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); 25938 } 25939 25940 template <typename Dispatch> setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize,Dispatch const & d) const25941 VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, 25942 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25943 { 25944 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25945 d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize ); 25946 } 25947 25948 template <typename Dispatch> setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,Dispatch const & d) const25949 VULKAN_HPP_INLINE void 
CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25950 { 25951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25952 d.vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) ); 25953 } 25954 25955 template <typename Dispatch> setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,Dispatch const & d) const25956 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, 25957 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25958 { 25959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25960 d.vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) ); 25961 } 25962 25963 template <typename Dispatch> setColorBlendAdvancedEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,Dispatch const & d) const25964 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 25965 uint32_t attachmentCount, 25966 const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, 25967 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25968 { 25969 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25970 d.vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 25971 firstAttachment, 25972 attachmentCount, 25973 reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); 25974 } 25975 25976 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25977 template <typename Dispatch> 25978 VULKAN_HPP_INLINE void setColorBlendAdvancedEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,Dispatch const & d) const25979 
CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 25980 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, 25981 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25982 { 25983 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25984 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25985 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && 25986 "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 25987 # endif 25988 25989 d.vkCmdSetColorBlendAdvancedEXT( 25990 m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) ); 25991 } 25992 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25993 25994 template <typename Dispatch> setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,Dispatch const & d) const25995 VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, 25996 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25997 { 25998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25999 d.vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); 26000 } 26001 26002 template <typename Dispatch> setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,Dispatch const & d) const26003 VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, 26004 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26005 { 26006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26007 d.vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( 
lineRasterizationMode ) ); 26008 } 26009 26010 template <typename Dispatch> setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,Dispatch const & d) const26011 VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26012 { 26013 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26014 d.vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) ); 26015 } 26016 26017 template <typename Dispatch> setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,Dispatch const & d) const26018 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, 26019 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26020 { 26021 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26022 d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) ); 26023 } 26024 26025 template <typename Dispatch> setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,Dispatch const & d) const26026 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, 26027 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26028 { 26029 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26030 d.vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) ); 26031 } 26032 26033 template <typename Dispatch> setViewportSwizzleNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,Dispatch const & d) const26034 VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 26035 uint32_t viewportCount, 26036 const 
VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, 26037 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26038 { 26039 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26040 d.vkCmdSetViewportSwizzleNV( 26041 static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); 26042 } 26043 26044 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26045 template <typename Dispatch> 26046 VULKAN_HPP_INLINE void setViewportSwizzleNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,Dispatch const & d) const26047 CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 26048 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, 26049 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26050 { 26051 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26052 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26053 VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV && 26054 "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 26055 # endif 26056 26057 d.vkCmdSetViewportSwizzleNV( 26058 m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) ); 26059 } 26060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26061 26062 template <typename Dispatch> setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,Dispatch const & d) const26063 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, 26064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26065 { 26066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26067 d.vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( 
coverageToColorEnable ) ); 26068 } 26069 26070 template <typename Dispatch> setCoverageToColorLocationNV(uint32_t coverageToColorLocation,Dispatch const & d) const26071 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26072 { 26073 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26074 d.vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation ); 26075 } 26076 26077 template <typename Dispatch> setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,Dispatch const & d) const26078 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, 26079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26080 { 26081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26082 d.vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); 26083 } 26084 26085 template <typename Dispatch> setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,Dispatch const & d) const26086 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, 26087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26088 { 26089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26090 d.vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) ); 26091 } 26092 26093 template <typename Dispatch> setCoverageModulationTableNV(uint32_t coverageModulationTableCount,const float * pCoverageModulationTable,Dispatch const & d) const26094 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t 
coverageModulationTableCount, 26095 const float * pCoverageModulationTable, 26096 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26097 { 26098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26099 d.vkCmdSetCoverageModulationTableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); 26100 } 26101 26102 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26103 template <typename Dispatch> setCoverageModulationTableNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,Dispatch const & d) const26104 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, 26105 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26106 { 26107 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26108 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26109 VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && 26110 "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 26111 # endif 26112 26113 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); 26114 } 26115 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26116 26117 template <typename Dispatch> setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,Dispatch const & d) const26118 VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, 26119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26120 { 26121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26122 d.vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) ); 26123 } 26124 26125 template <typename Dispatch> setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 
representativeFragmentTestEnable,Dispatch const & d) const26126 VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, 26127 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26128 { 26129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26130 d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( representativeFragmentTestEnable ) ); 26131 } 26132 26133 template <typename Dispatch> setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,Dispatch const & d) const26134 VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, 26135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26136 { 26137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26138 d.vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); 26139 } 26140 26141 //=== VK_EXT_shader_module_identifier === 26142 26143 template <typename Dispatch> getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const26144 VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 26145 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 26146 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26147 { 26148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26149 d.vkGetShaderModuleIdentifierEXT( 26150 static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 26151 } 26152 26153 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26154 template <typename Dispatch> 26155 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Dispatch const & d) const26156 Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26157 { 26158 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26159 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26160 VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 26161 # endif 26162 26163 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 26164 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 26165 26166 return identifier; 26167 } 26168 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26169 26170 template <typename Dispatch> getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const26171 VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 26172 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 26173 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26174 { 26175 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26176 d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ), 26177 reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), 26178 reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 26179 } 26180 26181 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26182 template <typename Dispatch> 26183 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT(const 
VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Dispatch const & d) const26184 Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 26185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26186 { 26187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26189 VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && 26190 "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 26191 # endif 26192 26193 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 26194 d.vkGetShaderModuleCreateInfoIdentifierEXT( 26195 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 26196 26197 return identifier; 26198 } 26199 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26200 26201 //=== VK_NV_optical_flow === 26202 26203 template <typename Dispatch> 26204 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,uint32_t * pFormatCount,VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,Dispatch const & d) const26205 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, 26206 uint32_t * pFormatCount, 26207 VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, 26208 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26209 { 26210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26211 return static_cast<Result>( 26212 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), 26213 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), 26214 pFormatCount, 26215 
reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); 26216 } 26217 26218 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26219 template <typename OpticalFlowImageFormatPropertiesNVAllocator, 26220 typename Dispatch, 26221 typename std::enable_if< 26222 std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, 26223 int>::type> 26224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 26225 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,Dispatch const & d) const26226 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 26227 Dispatch const & d ) const 26228 { 26229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26230 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26231 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 26232 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 26233 # endif 26234 26235 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties; 26236 uint32_t formatCount; 26237 VULKAN_HPP_NAMESPACE::Result result; 26238 do 26239 { 26240 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 26241 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 26242 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 26243 { 26244 imageFormatProperties.resize( formatCount ); 26245 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26246 
d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 26247 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 26248 &formatCount, 26249 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 26250 } 26251 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26252 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 26253 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 26254 if ( formatCount < imageFormatProperties.size() ) 26255 { 26256 imageFormatProperties.resize( formatCount ); 26257 } 26258 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 26259 } 26260 26261 template <typename OpticalFlowImageFormatPropertiesNVAllocator, 26262 typename Dispatch, 26263 typename std::enable_if< 26264 std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, 26265 int>::type> 26266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 26267 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,Dispatch const & d) const26268 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 26269 OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, 26270 Dispatch const & d ) const 26271 { 26272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26273 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26274 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 26275 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 26276 # endif 26277 26278 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( 26279 opticalFlowImageFormatPropertiesNVAllocator ); 26280 uint32_t formatCount; 26281 VULKAN_HPP_NAMESPACE::Result result; 26282 do 26283 { 26284 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 26285 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 26286 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 26287 { 26288 imageFormatProperties.resize( formatCount ); 26289 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26290 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 26291 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 26292 &formatCount, 26293 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 26294 } 26295 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26296 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 26297 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 26298 if ( formatCount < imageFormatProperties.size() ) 26299 { 26300 imageFormatProperties.resize( formatCount ); 26301 } 26302 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 26303 } 26304 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26305 26306 template <typename Dispatch> createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,Dispatch const & d) const26307 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, 26308 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26309 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, 26310 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26311 { 26312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26313 return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( m_device ), 26314 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), 26315 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 26316 reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); 26317 } 26318 26319 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26320 template <typename Dispatch> 26321 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26322 Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 26323 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26324 Dispatch const & d ) const 26325 { 26326 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26327 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26328 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 26329 # endif 26330 26331 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 26332 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 26333 m_device, 26334 reinterpret_cast<const 
VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 26335 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26336 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 26337 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); 26338 26339 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); 26340 } 26341 26342 # ifndef VULKAN_HPP_NO_SMART_HANDLE 26343 template <typename Dispatch> 26344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26345 Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 26346 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26347 Dispatch const & d ) const 26348 { 26349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26350 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26351 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 26352 # endif 26353 26354 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 26355 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 26356 m_device, 26357 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 26358 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26359 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 26360 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createOpticalFlowSessionNVUnique" ); 26361 26362 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 26363 result, UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 26364 } 26365 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 26366 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26367 26368 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26369 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26370 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26371 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26372 { 26373 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26374 d.vkDestroyOpticalFlowSessionNV( 26375 static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26376 } 26377 26378 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26379 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26380 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26381 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26382 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26383 { 26384 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26385 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26386 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 26387 # endif 26388 26389 d.vkDestroyOpticalFlowSessionNV( 26390 m_device, 26391 
static_cast<VkOpticalFlowSessionNV>( session ), 26392 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26393 } 26394 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26395 26396 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26397 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26398 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26400 { 26401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26402 d.vkDestroyOpticalFlowSessionNV( 26403 static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26404 } 26405 26406 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26407 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26408 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26409 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26410 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26411 { 26412 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26413 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26414 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 26415 # endif 26416 26417 d.vkDestroyOpticalFlowSessionNV( 26418 m_device, 26419 static_cast<VkOpticalFlowSessionNV>( session ), 26420 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26421 } 26422 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26423 
26424 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 26425 template <typename Dispatch> bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const26426 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26427 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 26428 VULKAN_HPP_NAMESPACE::ImageView view, 26429 VULKAN_HPP_NAMESPACE::ImageLayout layout, 26430 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26431 { 26432 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26433 return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ), 26434 static_cast<VkOpticalFlowSessionNV>( session ), 26435 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 26436 static_cast<VkImageView>( view ), 26437 static_cast<VkImageLayout>( layout ) ) ); 26438 } 26439 #else 26440 template <typename Dispatch> 26441 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const26442 Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26443 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 26444 VULKAN_HPP_NAMESPACE::ImageView view, 26445 VULKAN_HPP_NAMESPACE::ImageLayout layout, 26446 Dispatch const & d ) const 26447 { 26448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26449 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26450 VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && 
"Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" ); 26451 # endif 26452 26453 VULKAN_HPP_NAMESPACE::Result result = 26454 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindOpticalFlowSessionImageNV( m_device, 26455 static_cast<VkOpticalFlowSessionNV>( session ), 26456 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 26457 static_cast<VkImageView>( view ), 26458 static_cast<VkImageLayout>( layout ) ) ); 26459 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); 26460 26461 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 26462 } 26463 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 26464 26465 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,Dispatch const & d) const26466 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26467 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, 26468 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26469 { 26470 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26471 d.vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), 26472 static_cast<VkOpticalFlowSessionNV>( session ), 26473 reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); 26474 } 26475 26476 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26477 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,Dispatch const & d) const26478 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 26479 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, 26480 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26481 { 26482 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26483 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26484 VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" ); 26485 # endif 26486 26487 d.vkCmdOpticalFlowExecuteNV( 26488 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) ); 26489 } 26490 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26491 26492 //=== VK_KHR_maintenance5 === 26493 26494 template <typename Dispatch> bindIndexBuffer2KHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const26495 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 26496 VULKAN_HPP_NAMESPACE::DeviceSize offset, 26497 VULKAN_HPP_NAMESPACE::DeviceSize size, 26498 VULKAN_HPP_NAMESPACE::IndexType indexType, 26499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26500 { 26501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26502 d.vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 26503 static_cast<VkBuffer>( buffer ), 26504 static_cast<VkDeviceSize>( offset ), 26505 static_cast<VkDeviceSize>( size ), 26506 static_cast<VkIndexType>( indexType ) ); 26507 } 26508 26509 template <typename Dispatch> getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const26510 VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, 26511 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 26512 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26513 { 26514 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26515 
d.vkGetRenderingAreaGranularityKHR( 26516 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 26517 } 26518 26519 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26520 template <typename Dispatch> 26521 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo,Dispatch const & d) const26522 Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26523 { 26524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26525 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26526 VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 26527 # endif 26528 26529 VULKAN_HPP_NAMESPACE::Extent2D granularity; 26530 d.vkGetRenderingAreaGranularityKHR( 26531 m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 26532 26533 return granularity; 26534 } 26535 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26536 26537 template <typename Dispatch> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const26538 VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, 26539 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 26540 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26541 { 26542 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26543 d.vkGetDeviceImageSubresourceLayoutKHR( 26544 static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), 
reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 26545 } 26546 26547 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26548 template <typename Dispatch> 26549 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info,Dispatch const & d) const26550 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26551 { 26552 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26553 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26554 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && 26555 "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 26556 # endif 26557 26558 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 26559 d.vkGetDeviceImageSubresourceLayoutKHR( 26560 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 26561 26562 return layout; 26563 } 26564 26565 template <typename X, typename Y, typename... 
Z, typename Dispatch> 26566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info,Dispatch const & d) const26567 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26568 { 26569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26571 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && 26572 "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 26573 # endif 26574 26575 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 26576 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 26577 d.vkGetDeviceImageSubresourceLayoutKHR( 26578 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 26579 26580 return structureChain; 26581 } 26582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26583 26584 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout,Dispatch const & d) const26585 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, 26586 const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, 26587 VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, 26588 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26589 { 26590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26591 d.vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ), 26592 static_cast<VkImage>( image ), 26593 reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), 26594 
reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); 26595 } 26596 26597 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26598 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const26599 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2KHR( 26600 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26601 { 26602 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26603 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26604 VULKAN_HPP_ASSERT( 26605 d.vkGetImageSubresourceLayout2KHR && 26606 "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 26607 # endif 26608 26609 VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; 26610 d.vkGetImageSubresourceLayout2KHR( m_device, 26611 static_cast<VkImage>( image ), 26612 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 26613 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 26614 26615 return layout; 26616 } 26617 26618 template <typename X, typename Y, typename... 
Z, typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource,Dispatch const & d) const26619 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR( 26620 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26621 { 26622 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26623 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26624 VULKAN_HPP_ASSERT( 26625 d.vkGetImageSubresourceLayout2KHR && 26626 "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); 26627 # endif 26628 26629 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 26630 VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>(); 26631 d.vkGetImageSubresourceLayout2KHR( m_device, 26632 static_cast<VkImage>( image ), 26633 reinterpret_cast<const VkImageSubresource2 *>( &subresource ), 26634 reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); 26635 26636 return structureChain; 26637 } 26638 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26639 26640 //=== VK_AMD_anti_lag === 26641 26642 template <typename Dispatch> antiLagUpdateAMD(const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData,Dispatch const & d) const26643 VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26644 { 26645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26646 d.vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( pData ) ); 26647 } 26648 26649 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26650 template <typename Dispatch> 
antiLagUpdateAMD(const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data,Dispatch const & d) const26651 VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26652 { 26653 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26654 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26655 VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function <vkAntiLagUpdateAMD> requires <VK_AMD_anti_lag>" ); 26656 # endif 26657 26658 d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( &data ) ); 26659 } 26660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26661 26662 //=== VK_EXT_shader_object === 26663 26664 template <typename Dispatch> createShadersEXT(uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,Dispatch const & d) const26665 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, 26666 const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, 26667 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26668 VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, 26669 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26670 { 26671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26672 return static_cast<Result>( d.vkCreateShadersEXT( static_cast<VkDevice>( m_device ), 26673 createInfoCount, 26674 reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ), 26675 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 26676 reinterpret_cast<VkShaderEXT *>( pShaders ) ) ); 26677 } 26678 26679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26680 template <typename ShaderEXTAllocator, 26681 typename Dispatch, 26682 typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type> 26683 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>> createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26684 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 26685 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26686 Dispatch const & d ) const 26687 { 26688 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26689 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26690 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26691 # endif 26692 26693 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() ); 26694 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26695 d.vkCreateShadersEXT( m_device, 26696 createInfos.size(), 26697 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 26698 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26699 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 26700 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26701 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", 26702 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26703 26704 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); 26705 } 26706 26707 template <typename ShaderEXTAllocator, 26708 typename Dispatch, 26709 typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type> 26710 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>> createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const26711 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 26712 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26713 ShaderEXTAllocator & shaderEXTAllocator, 26714 Dispatch const & d ) const 26715 { 26716 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26717 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26718 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26719 # endif 26720 26721 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator ); 26722 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26723 d.vkCreateShadersEXT( m_device, 26724 createInfos.size(), 26725 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 26726 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26727 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 26728 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26729 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", 26730 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26731 26732 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); 26733 } 26734 26735 template <typename Dispatch> 26736 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT> createShaderEXT(const 
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26737 Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 26738 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26739 Dispatch const & d ) const 26740 { 26741 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26742 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26743 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26744 # endif 26745 26746 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 26747 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26748 d.vkCreateShadersEXT( m_device, 26749 1, 26750 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 26751 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26752 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 26753 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26754 VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", 26755 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26756 26757 return ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>( result, std::move( shader ) ); 26758 } 26759 26760 # ifndef VULKAN_HPP_NO_SMART_HANDLE 26761 template < 26762 typename Dispatch, 26763 typename ShaderEXTAllocator, 26764 typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type> 26765 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>> createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26766 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 26767 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26768 Dispatch const & d ) const 26769 { 26770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26771 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26772 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26773 # endif 26774 26775 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 26776 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26777 d.vkCreateShadersEXT( m_device, 26778 createInfos.size(), 26779 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 26780 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26781 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 26782 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26783 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", 26784 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26785 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders; 26786 uniqueShaders.reserve( createInfos.size() ); 26787 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 26788 for ( auto const & shader : shaders ) 26789 { 26790 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 26791 } 26792 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); 26793 } 26794 26795 template < 26796 typename Dispatch, 26797 typename ShaderEXTAllocator, 26798 typename 
std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type> 26799 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>> createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const26800 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 26801 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26802 ShaderEXTAllocator & shaderEXTAllocator, 26803 Dispatch const & d ) const 26804 { 26805 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26806 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26807 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26808 # endif 26809 26810 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 26811 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26812 d.vkCreateShadersEXT( m_device, 26813 createInfos.size(), 26814 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 26815 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26816 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 26817 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26818 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", 26819 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26820 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); 26821 
uniqueShaders.reserve( createInfos.size() ); 26822 detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 26823 for ( auto const & shader : shaders ) 26824 { 26825 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 26826 } 26827 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); 26828 } 26829 26830 template <typename Dispatch> 26831 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>> createShaderEXTUnique(const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26832 Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 26833 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26834 Dispatch const & d ) const 26835 { 26836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26837 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26838 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 26839 # endif 26840 26841 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 26842 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26843 d.vkCreateShadersEXT( m_device, 26844 1, 26845 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 26846 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 26847 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 26848 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 26849 VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", 26850 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 26851 26852 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, 
Dispatch>>( 26853 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 26854 } 26855 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 26856 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26857 26858 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26859 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 26860 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26861 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26862 { 26863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26864 d.vkDestroyShaderEXT( 26865 static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26866 } 26867 26868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26869 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26870 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 26871 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26873 { 26874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26875 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26876 VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); 26877 # endif 26878 26879 d.vkDestroyShaderEXT( m_device, 26880 static_cast<VkShaderEXT>( shader ), 26881 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26882 } 26883 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26884 26885 template <typename Dispatch> 
destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const26886 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 26887 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 26888 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26889 { 26890 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26891 d.vkDestroyShaderEXT( 26892 static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 26893 } 26894 26895 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26896 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const26897 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 26898 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 26899 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26900 { 26901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26902 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26903 VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); 26904 # endif 26905 26906 d.vkDestroyShaderEXT( m_device, 26907 static_cast<VkShaderEXT>( shader ), 26908 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 26909 } 26910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26911 26912 template <typename Dispatch> 26913 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,size_t * pDataSize,void * pData,Dispatch const & d) const26914 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26915 { 26916 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26917 return static_cast<Result>( d.vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); 26918 } 26919 26920 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26921 template <typename Uint8_tAllocator, 26922 typename Dispatch, 26923 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 26924 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Dispatch const & d) const26925 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const 26926 { 26927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26928 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26929 VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); 26930 # endif 26931 26932 std::vector<uint8_t, Uint8_tAllocator> data; 26933 size_t dataSize; 26934 VULKAN_HPP_NAMESPACE::Result result; 26935 do 26936 { 26937 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 26938 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 26939 { 26940 data.resize( dataSize ); 26941 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26942 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 26943 } 26944 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26945 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); 26946 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 26947 if ( dataSize < data.size() ) 26948 { 26949 data.resize( dataSize ); 26950 } 26951 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 26952 } 26953 26954 template <typename Uint8_tAllocator, 26955 typename Dispatch, 26956 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 26957 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const26958 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 26959 { 26960 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26961 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26962 VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); 26963 # endif 26964 26965 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 26966 size_t dataSize; 26967 VULKAN_HPP_NAMESPACE::Result result; 26968 do 26969 { 26970 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 26971 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 26972 { 26973 data.resize( dataSize ); 26974 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26975 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 26976 } 26977 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26978 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); 26979 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 26980 if ( dataSize < data.size() ) 26981 { 26982 data.resize( dataSize ); 26983 } 26984 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 26985 } 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: binds pShaders[i] to the pipeline stage pStages[i] on this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t                                          stageCount,
                                                        const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
                                                        const VULKAN_HPP_NAMESPACE::ShaderEXT *           pShaders,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                           stageCount,
                           reinterpret_cast<const VkShaderStageFlagBits *>( pStages ),
                           reinterpret_cast<const VkShaderEXT *>( pShaders ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking ArrayProxy ranges; throws LogicError (or asserts when exceptions
  // are disabled) if the two ranges differ in length.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
                                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const &           shaders,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
# else
    if ( stages.size() != shaders.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindShadersEXT( m_commandBuffer,
                           stages.size(),
                           reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
                           reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: sets the depth clamp mode and, for user-defined mode, the clamp range.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT          depthClampMode,
                                                               const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthClampRangeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                  static_cast<VkDepthClampModeEXT>( depthClampMode ),
                                  reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the Optional<> wrapper lets callers pass nullptr for the clamp range.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT                   depthClampMode,
                                                               Optional<const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT> depthClampRange,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT &&
                       "Function <vkCmdSetDepthClampRangeEXT> requires <VK_EXT_depth_clamp_control> or <VK_EXT_shader_object>" );
# endif

    d.vkCmdSetDepthClampRangeEXT(
      m_commandBuffer,
      static_cast<VkDepthClampModeEXT>( depthClampMode ),
      reinterpret_cast<const VkDepthClampRangeEXT *>( static_cast<const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT *>( depthClampRange ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_pipeline_binary ===

  // Raw-pointer overload: forwards directly to vkCreatePipelineBinariesKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR *      pBinaries,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePipelineBinariesKHR( static_cast<VkDevice>( m_device ),
                                                               reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( pCreateInfo ),
                                                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                               reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( pBinaries ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload returning a vector of PipelineBinaryKHR.
  // If createInfo.pKeysAndDataInfo is set, the binary count is known up front (one call);
  // otherwise the count is queried first and the handles fetched in a second call.
  // Returns ResultValue (not ResultValueType) because ePipelineBinaryMissingKHR is a valid,
  // non-throwing outcome the caller must inspect.
  template <typename PipelineBinaryKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>
    Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries;
    VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR                               binaries;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    if ( createInfo.pKeysAndDataInfo )
    {
      // Keys/data supplied: exactly one of the three sources must be used.
      VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo );
      pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount );
      binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount;
      binaries.pPipelineBinaries   = pipelineBinaries.data();
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
    }
    else
    {
      // Exactly one of pipeline / pPipelineCreateInfo must be set.
      VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo );
      // First call with pPipelineBinaries == nullptr queries the count.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
      {
        pipelineBinaries.resize( binaries.pipelineBinaryCount );
        binaries.pPipelineBinaries = pipelineBinaries.data();
        // Second call actually creates the binaries.
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
          m_device,
          reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      }
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) );
  }

  // Same as above, but constructs the result vector from a caller-supplied allocator.
  template <typename PipelineBinaryKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>
    Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       PipelineBinaryKHRAllocator &                              pipelineBinaryKHRAllocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries( pipelineBinaryKHRAllocator );
    VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR                               binaries;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    if ( createInfo.pKeysAndDataInfo )
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo );
      pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount );
      binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount;
      binaries.pPipelineBinaries   = pipelineBinaries.data();
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
    }
    else
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
      {
        pipelineBinaries.resize( binaries.pipelineBinaryCount );
        binaries.pPipelineBinaries = pipelineBinaries.data();
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
          m_device,
          reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      }
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps each created binary in a UniqueHandle that destroys it
  // through ObjectDestroy on destruction.
  template <typename Dispatch,
            typename PipelineBinaryKHRAllocator,
            typename std::enable_if<
              std::is_same<typename PipelineBinaryKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>
    Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" );
#  endif

    // Plain handles are created first, then wrapped into unique handles below.
    std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> pipelineBinaries;
    VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR   binaries;
    VULKAN_HPP_NAMESPACE::Result                         result;
    if ( createInfo.pKeysAndDataInfo )
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo );
      pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount );
      binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount;
      binaries.pPipelineBinaries   = pipelineBinaries.data();
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
    }
    else
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
      {
        pipelineBinaries.resize( binaries.pipelineBinaryCount );
        binaries.pPipelineBinaries = pipelineBinaries.data();
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
          m_device,
          reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      }
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries;
    uniquePipelineBinaries.reserve( pipelineBinaries.size() );
    detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipelineBinary : pipelineBinaries )
    {
      uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>(
      result, std::move( uniquePipelineBinaries ) );
  }

  // Unique-handle variant with a caller-supplied allocator for the result vector.
  template <typename Dispatch,
            typename PipelineBinaryKHRAllocator,
            typename std::enable_if<
              std::is_same<typename PipelineBinaryKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>
    Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             PipelineBinaryKHRAllocator &                              pipelineBinaryKHRAllocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> pipelineBinaries;
    VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR   binaries;
    VULKAN_HPP_NAMESPACE::Result                         result;
    if ( createInfo.pKeysAndDataInfo )
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo );
      pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount );
      binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount;
      binaries.pPipelineBinaries   = pipelineBinaries.data();
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
    }
    else
    {
      VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
        m_device,
        reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
        reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
      {
        pipelineBinaries.resize( binaries.pipelineBinaryCount );
        binaries.pPipelineBinaries = pipelineBinaries.data();
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePipelineBinariesKHR(
          m_device,
          reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
      }
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries(
      pipelineBinaryKHRAllocator );
    uniquePipelineBinaries.reserve( pipelineBinaries.size() );
    detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipelineBinary : pipelineBinaries )
    {
      uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>(
      result, std::move( uniquePipelineBinaries ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: destroys a pipeline binary with optional allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR           pipelineBinary,
                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineBinaryKHR(
      static_cast<VkDevice>( m_device ),
      static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR                   pipelineBinary,
                                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    d.vkDestroyPipelineBinaryKHR(
      m_device,
      static_cast<VkPipelineBinaryKHR>( pipelineBinary ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload for PipelineBinaryKHR; forwards to vkDestroyPipelineBinaryKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR           pipelineBinary,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPipelineBinaryKHR(
      static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced generic destroy() overload for PipelineBinaryKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR                   pipelineBinary,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    d.vkDestroyPipelineBinaryKHR(
      m_device,
      static_cast<VkPipelineBinaryKHR>( pipelineBinary ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: derives the binary key for a pipeline create info (or the global key
  // when pPipelineCreateInfo is null).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo,
                                                                           VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR *        pPipelineKey,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineKeyKHR( static_cast<VkDevice>( m_device ),
                                                       reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ),
                                                       reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the key by value; throws via resultCheck on failure.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR>::type
    Device::getPipelineKeyKHR( Optional<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR> pipelineCreateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function <vkGetPipelineKeyKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey;
    VULKAN_HPP_NAMESPACE::Result               result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineKeyKHR(
      m_device,
      reinterpret_cast<const VkPipelineCreateInfoKHR *>( static_cast<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR *>( pipelineCreateInfo ) ),
      reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineKey ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: queries key/size/data of a pipeline binary in one call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo,
                                                                                  VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR *            pPipelineBinaryKey,
                                                                                  size_t *                                                pPipelineBinaryDataSize,
                                                                                  void *                                                  pPipelineBinaryData,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ),
                                                              reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( pInfo ),
                                                              reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineBinaryKey ),
                                                              pPipelineBinaryDataSize,
                                                              pPipelineBinaryData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns { key, binary bytes }. First call (null data pointer) queries the
  // size; on eSuccess a second call fetches the bytes. Note: unlike the enumerate helpers, this
  // does not loop on eIncomplete.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    // References into the pair so both outputs are filled in place.
    std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_;
    VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR &                                                  pipelineBinaryKey  = data_.first;
    std::vector<uint8_t, Uint8_tAllocator> &                                                      pipelineBinaryData = data_.second;
    size_t                                                                                        pipelineBinaryDataSize;
    VULKAN_HPP_NAMESPACE::Result                                                                  result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device,
                                                                               reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
                                                                               reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
                                                                               &pipelineBinaryDataSize,
                                                                               nullptr ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      pipelineBinaryData.resize( pipelineBinaryDataSize );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device,
                                                                                        reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
                                                                                        reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
                                                                                        &pipelineBinaryDataSize,
                                                                                        reinterpret_cast<void *>( pipelineBinaryData.data() ) ) );
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }

  // Same as above, with a caller-supplied allocator for the byte vector; the pair is built with
  // piecewise_construct so the allocator reaches the vector's constructor.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info,
                                      Uint8_tAllocator &                                      uint8_tAllocator,
                                      Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_(
      std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
    VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR &                                                  pipelineBinaryKey  = data_.first;
    std::vector<uint8_t, Uint8_tAllocator> &                                                      pipelineBinaryData = data_.second;
    size_t                                                                                        pipelineBinaryDataSize;
    VULKAN_HPP_NAMESPACE::Result                                                                  result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device,
                                                                               reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
                                                                               reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
                                                                               &pipelineBinaryDataSize,
                                                                               nullptr ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      pipelineBinaryData.resize( pipelineBinaryDataSize );
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineBinaryDataKHR( m_device,
                                                                                        reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
                                                                                        reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
                                                                                        &pipelineBinaryDataSize,
                                                                                        reinterpret_cast<void *>( pipelineBinaryData.data() ) ) );
    }

    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw-pointer overload: releases pipeline data captured for binary creation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo,
                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( static_cast<VkDevice>( m_device ),
                                                                    reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns void — the VkResult is intentionally discarded here, matching
  // the raw-pointer overload's non-nodiscard Result.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function <vkReleaseCapturedPipelineDataKHR> requires <VK_KHR_pipeline_binary>" );
# endif

    d.vkReleaseCapturedPipelineDataKHR(
      m_device,
      reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( &info ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_QCOM_tile_properties ===

  // Raw-pointer overload: enumerates tile properties of a framebuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer         framebuffer,
                                                                                          uint32_t *                                pPropertiesCount,
                                                                                          VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
                                                                      static_cast<VkFramebuffer>( framebuffer ),
                                                                      pPropertiesCount,
                                                                      reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: standard two-call enumerate loop.
  // NOTE(review): no resultCheck / createResultValueType here and the vector is returned
  // directly — presumably because this command defines no error codes; verify against the
  // vkGetFramebufferTilePropertiesQCOM spec / generator before changing.
  template <typename TilePropertiesQCOMAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
    Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
    uint32_t                                                                           propertiesCount;
    VULKAN_HPP_NAMESPACE::Result                                                       result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount )
      {
        properties.resize( propertiesCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM(
          m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );

    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
    if ( propertiesCount < properties.size() )
    {
      properties.resize( propertiesCount );
    }
    return properties;
  }

  // Same as above, with a caller-supplied allocator for the result vector.
  template <typename TilePropertiesQCOMAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
    Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                              TilePropertiesQCOMAllocator &     tilePropertiesQCOMAllocator,
                                              Dispatch const &                  d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
    uint32_t                                                                           propertiesCount;
    VULKAN_HPP_NAMESPACE::Result                                                       result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount )
      {
        properties.resize( propertiesCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM(
          m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );

VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 27611 if ( propertiesCount < properties.size() ) 27612 { 27613 properties.resize( propertiesCount ); 27614 } 27615 return properties; 27616 } 27617 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27618 27619 template <typename Dispatch> getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const27620 VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 27621 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 27622 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27623 { 27624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27625 return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ), 27626 reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), 27627 reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 27628 } 27629 27630 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27631 template <typename Dispatch> 27632 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const27633 Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27634 { 27635 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27636 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27637 VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 27638 # endif 27639 27640 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; 27641 d.vkGetDynamicRenderingTilePropertiesQCOM( 27642 m_device, reinterpret_cast<const VkRenderingInfo *>( 
&renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) ); 27643 27644 return properties; 27645 } 27646 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27647 27648 //=== VK_NV_low_latency2 === 27649 27650 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,Dispatch const & d) const27651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27652 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, 27653 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27654 { 27655 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27656 return static_cast<Result>( d.vkSetLatencySleepModeNV( 27657 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); 27658 } 27659 27660 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27661 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,Dispatch const & d) const27662 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27663 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, 27664 Dispatch const & d ) const 27665 { 27666 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27667 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27668 VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" ); 27669 # endif 27670 27671 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27672 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) ); 27673 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); 27674 27675 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 27676 } 27677 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27678 27679 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,Dispatch const & d) const27680 VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27681 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, 27682 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27683 { 27684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27685 return static_cast<Result>( d.vkLatencySleepNV( 27686 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); 27687 } 27688 27689 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27690 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo,Dispatch const & d) const27691 VULKAN_HPP_INLINE void Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27692 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, 27693 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27694 { 27695 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27696 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27697 VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" ); 27698 # endif 27699 27700 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ); 27701 } 27702 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27703 27704 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const 
VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const27705 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27706 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, 27707 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27708 { 27709 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27710 d.vkSetLatencyMarkerNV( 27711 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 27712 } 27713 27714 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27715 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,Dispatch const & d) const27716 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27717 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, 27718 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27719 { 27720 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27721 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27722 VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" ); 27723 # endif 27724 27725 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 27726 } 27727 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27728 27729 template <typename Dispatch> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const27730 VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27731 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, 27732 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 27733 { 27734 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27735 d.vkGetLatencyTimingsNV( 27736 static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 27737 } 27738 27739 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27740 template < 27741 typename LatencyTimingsFrameReportNVAllocator, 27742 typename Dispatch, 27743 typename std::enable_if<std::is_same<typename LatencyTimingsFrameReportNVAllocator::value_type, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>::value, 27744 int>::type> 27745 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const27746 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 27747 { 27748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27749 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27750 VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); 27751 # endif 27752 27753 std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings; 27754 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 27755 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 27756 timings.resize( latencyMarkerInfo.timingCount ); 27757 latencyMarkerInfo.pTimings = timings.data(); 27758 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 27759 27760 return timings; 27761 } 27762 27763 template < 27764 typename LatencyTimingsFrameReportNVAllocator, 27765 typename Dispatch, 27766 typename 
std::enable_if<std::is_same<typename LatencyTimingsFrameReportNVAllocator::value_type, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>::value, 27767 int>::type> 27768 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator,Dispatch const & d) const27769 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 27770 LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, 27771 Dispatch const & d ) const 27772 { 27773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27774 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27775 VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); 27776 # endif 27777 27778 std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings( latencyTimingsFrameReportNVAllocator ); 27779 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 27780 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 27781 timings.resize( latencyMarkerInfo.timingCount ); 27782 latencyMarkerInfo.pTimings = timings.data(); 27783 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 27784 27785 return timings; 27786 } 27787 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27788 27789 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,Dispatch const & d) const27790 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, 27791 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 27792 { 27793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27794 d.vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); 27795 } 27796 27797 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27798 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,Dispatch const & d) const27799 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, 27800 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27801 { 27802 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27803 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27804 VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" ); 27805 # endif 27806 27807 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); 27808 } 27809 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27810 27811 //=== VK_KHR_cooperative_matrix === 27812 27813 template <typename Dispatch> getCooperativeMatrixPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties,Dispatch const & d) const27814 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( 27815 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27816 { 27817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27818 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 27819 static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); 27820 } 27821 27822 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27823 template <typename 
CooperativeMatrixPropertiesKHRAllocator, 27824 typename Dispatch, 27825 typename std::enable_if< 27826 std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, 27827 int>::type> 27828 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 27829 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(Dispatch const & d) const27830 PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const 27831 { 27832 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27833 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27834 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 27835 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 27836 # endif 27837 27838 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; 27839 uint32_t propertyCount; 27840 VULKAN_HPP_NAMESPACE::Result result; 27841 do 27842 { 27843 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 27844 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 27845 { 27846 properties.resize( propertyCount ); 27847 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 27848 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 27849 } 27850 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 27851 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 27852 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 27853 if ( propertyCount < properties.size() 
) 27854 { 27855 properties.resize( propertyCount ); 27856 } 27857 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 27858 } 27859 27860 template <typename CooperativeMatrixPropertiesKHRAllocator, 27861 typename Dispatch, 27862 typename std::enable_if< 27863 std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, 27864 int>::type> 27865 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 27866 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,Dispatch const & d) const27867 PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, 27868 Dispatch const & d ) const 27869 { 27870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27871 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27872 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 27873 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 27874 # endif 27875 27876 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( 27877 cooperativeMatrixPropertiesKHRAllocator ); 27878 uint32_t propertyCount; 27879 VULKAN_HPP_NAMESPACE::Result result; 27880 do 27881 { 27882 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 27883 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 27884 { 27885 properties.resize( propertyCount ); 27886 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 27887 m_physicalDevice, 
&propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 27888 } 27889 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 27890 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 27891 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 27892 if ( propertyCount < properties.size() ) 27893 { 27894 properties.resize( propertyCount ); 27895 } 27896 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 27897 } 27898 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27899 27900 //=== VK_EXT_attachment_feedback_loop_dynamic_state === 27901 27902 template <typename Dispatch> setAttachmentFeedbackLoopEnableEXT(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,Dispatch const & d) const27903 VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, 27904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27905 { 27906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27907 d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageAspectFlags>( aspectMask ) ); 27908 } 27909 27910 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 27911 //=== VK_QNX_external_memory_screen_buffer === 27912 27913 template <typename Dispatch> getScreenBufferPropertiesQNX(const struct _screen_buffer * buffer,VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,Dispatch const & d) const27914 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, 27915 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, 27916 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27917 { 27918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27919 return static_cast<Result>( 27920 d.vkGetScreenBufferPropertiesQNX( 
static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); 27921 } 27922 27923 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27924 template <typename Dispatch> 27925 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const27926 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 27927 { 27928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27929 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27930 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 27931 # endif 27932 27933 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; 27934 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27935 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 27936 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 27937 27938 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 27939 } 27940 27941 template <typename X, typename Y, typename... 
Z, typename Dispatch> 27942 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const27943 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 27944 { 27945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27946 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27947 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 27948 # endif 27949 27950 StructureChain<X, Y, Z...> structureChain; 27951 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>(); 27952 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 27953 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 27954 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 27955 27956 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 27957 } 27958 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 27959 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 27960 27961 //=== VK_KHR_line_rasterization === 27962 27963 template <typename Dispatch> 27964 VULKAN_HPP_INLINE void setLineStippleKHR(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const27965 CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27966 { 27967 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27968 d.vkCmdSetLineStippleKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); 27969 } 27970 27971 
//=== VK_KHR_calibrated_timestamps === 27972 27973 template <typename Dispatch> getCalibrateableTimeDomainsKHR(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const27974 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, 27975 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 27976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 27977 { 27978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27979 return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( 27980 static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 27981 } 27982 27983 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 27984 template <typename TimeDomainKHRAllocator, 27985 typename Dispatch, 27986 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 27987 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(Dispatch const & d) const27988 PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const 27989 { 27990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27991 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 27992 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 27993 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 27994 # endif 27995 27996 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 27997 uint32_t timeDomainCount; 27998 VULKAN_HPP_NAMESPACE::Result result; 27999 do 28000 { 28001 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( 
m_physicalDevice, &timeDomainCount, nullptr ) ); 28002 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 28003 { 28004 timeDomains.resize( timeDomainCount ); 28005 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 28006 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 28007 } 28008 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 28009 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 28010 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 28011 if ( timeDomainCount < timeDomains.size() ) 28012 { 28013 timeDomains.resize( timeDomainCount ); 28014 } 28015 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 28016 } 28017 28018 template <typename TimeDomainKHRAllocator, 28019 typename Dispatch, 28020 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 28021 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const28022 PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 28023 { 28024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28025 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28026 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 28027 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 28028 # endif 28029 28030 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( 
timeDomainKHRAllocator ); 28031 uint32_t timeDomainCount; 28032 VULKAN_HPP_NAMESPACE::Result result; 28033 do 28034 { 28035 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 28036 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 28037 { 28038 timeDomains.resize( timeDomainCount ); 28039 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 28040 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 28041 } 28042 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 28043 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 28044 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 28045 if ( timeDomainCount < timeDomains.size() ) 28046 { 28047 timeDomains.resize( timeDomainCount ); 28048 } 28049 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 28050 } 28051 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28052 28053 template <typename Dispatch> getCalibratedTimestampsKHR(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const28054 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, 28055 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 28056 uint64_t * pTimestamps, 28057 uint64_t * pMaxDeviation, 28058 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28059 { 28060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28061 return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ), 28062 timestampCount, 28063 reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( 
pTimestampInfos ), 28064 pTimestamps, 28065 pMaxDeviation ) ); 28066 } 28067 28068 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28069 template <typename Uint64_tAllocator, 28070 typename Dispatch, 28071 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 28072 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const28073 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 28074 Dispatch const & d ) const 28075 { 28076 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28077 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28078 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 28079 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 28080 # endif 28081 28082 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 28083 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 28084 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 28085 uint64_t & maxDeviation = data_.second; 28086 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 28087 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 28088 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 28089 28090 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 28091 } 28092 28093 template <typename 
Uint64_tAllocator, 28094 typename Dispatch, 28095 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 28096 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const28097 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 28098 Uint64_tAllocator & uint64_tAllocator, 28099 Dispatch const & d ) const 28100 { 28101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28102 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28103 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 28104 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 28105 # endif 28106 28107 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 28108 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 28109 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 28110 uint64_t & maxDeviation = data_.second; 28111 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 28112 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 28113 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 28114 28115 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 28116 } 28117 28118 template <typename Dispatch> 28119 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampKHR(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const28120 Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 28121 { 28122 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28123 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28124 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 28125 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 28126 # endif 28127 28128 std::pair<uint64_t, uint64_t> data_; 28129 uint64_t & timestamp = data_.first; 28130 uint64_t & maxDeviation = data_.second; 28131 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 28132 d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 28133 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); 28134 28135 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 28136 } 28137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28138 28139 //=== VK_KHR_maintenance6 === 28140 28141 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo,Dispatch const & d) const28142 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, 28143 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28144 { 28145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28146 d.vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 28147 reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) ); 28148 } 28149 28150 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28151 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo,Dispatch const & d) const28152 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, 28153 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28154 { 28155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28156 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28157 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 28158 # endif 28159 28160 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) ); 28161 } 28162 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28163 28164 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo,Dispatch const & d) const28165 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, 28166 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28167 { 28168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28169 d.vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) ); 28170 } 28171 28172 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28173 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo,Dispatch const & d) const28174 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, 28175 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28176 { 28177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28178 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28179 
VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 28180 # endif 28181 28182 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) ); 28183 } 28184 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28185 28186 template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo,Dispatch const & d) const28187 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, 28188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28189 { 28190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28191 d.vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 28192 reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) ); 28193 } 28194 28195 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28196 template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo,Dispatch const & d) const28197 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, 28198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28199 { 28200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28201 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28202 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 28203 # endif 28204 28205 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) ); 28206 } 28207 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28208 28209 template <typename Dispatch> 28210 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const 
VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo,Dispatch const & d) const28211 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, 28212 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28213 { 28214 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28215 d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), 28216 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) ); 28217 } 28218 28219 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28220 template <typename Dispatch> 28221 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo,Dispatch const & d) const28222 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, 28223 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28224 { 28225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28226 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28227 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && 28228 "Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); 28229 # endif 28230 28231 d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, 28232 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) ); 28233 } 28234 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28235 28236 template <typename Dispatch> 28237 VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo,Dispatch const & d) const28238 CommandBuffer::setDescriptorBufferOffsets2EXT( const 
VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, 28239 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28240 { 28241 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28242 d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 28243 reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); 28244 } 28245 28246 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28247 template <typename Dispatch> 28248 VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo,Dispatch const & d) const28249 CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, 28250 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28251 { 28252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28254 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" ); 28255 # endif 28256 28257 d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) ); 28258 } 28259 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28260 28261 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const28262 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 28263 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, 28264 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28265 { 28266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28267 
d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 28268 static_cast<VkCommandBuffer>( m_commandBuffer ), 28269 reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); 28270 } 28271 28272 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28273 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const28274 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 28275 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, 28276 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28277 { 28278 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28279 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28280 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && 28281 "Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" ); 28282 # endif 28283 28284 d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 28285 m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) ); 28286 } 28287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28288 28289 //=== VK_EXT_device_generated_commands === 28290 28291 template <typename Dispatch> getGeneratedCommandsMemoryRequirementsEXT(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const28292 VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, 28293 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 28294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28295 { 28296 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28297 d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ), 28298 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( pInfo ), 28299 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 28300 } 28301 28302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28303 template <typename Dispatch> 28304 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsEXT(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info,Dispatch const & d) const28305 Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, 28306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28307 { 28308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28309 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28310 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && 28311 "Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" ); 28312 # endif 28313 28314 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 28315 d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, 28316 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ), 28317 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 28318 28319 return memoryRequirements; 28320 } 28321 28322 template <typename X, typename Y, typename... 
Z, typename Dispatch> 28323 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsEXT(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info,Dispatch const & d) const28324 Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, 28325 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28326 { 28327 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28328 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28329 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && 28330 "Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" ); 28331 # endif 28332 28333 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 28334 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 28335 d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, 28336 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ), 28337 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 28338 28339 return structureChain; 28340 } 28341 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28342 28343 template <typename Dispatch> preprocessGeneratedCommandsEXT(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo,VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer,Dispatch const & d) const28344 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, 28345 VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, 28346 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28347 { 28348 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28349 d.vkCmdPreprocessGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 
28350 reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ), 28351 static_cast<VkCommandBuffer>( stateCommandBuffer ) ); 28352 } 28353 28354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28355 template <typename Dispatch> preprocessGeneratedCommandsEXT(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo,VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer,Dispatch const & d) const28356 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, 28357 VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, 28358 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28359 { 28360 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28361 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28362 VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && "Function <vkCmdPreprocessGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" ); 28363 # endif 28364 28365 d.vkCmdPreprocessGeneratedCommandsEXT( 28366 m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ), static_cast<VkCommandBuffer>( stateCommandBuffer ) ); 28367 } 28368 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28369 28370 template <typename Dispatch> executeGeneratedCommandsEXT(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo,Dispatch const & d) const28371 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 28372 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, 28373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28374 { 28375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28376 d.vkCmdExecuteGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), 28377 static_cast<VkBool32>( isPreprocessed ), 28378 reinterpret_cast<const 
VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) ); 28379 } 28380 28381 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28382 template <typename Dispatch> executeGeneratedCommandsEXT(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo,Dispatch const & d) const28383 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 28384 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, 28385 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28386 { 28387 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28388 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28389 VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function <vkCmdExecuteGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" ); 28390 # endif 28391 28392 d.vkCmdExecuteGeneratedCommandsEXT( 28393 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ) ); 28394 } 28395 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28396 28397 template <typename Dispatch> 28398 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectCommandsLayoutEXT(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout,Dispatch const & d) const28399 Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, 28400 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28401 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, 28402 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28403 { 28404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28405 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( 
static_cast<VkDevice>( m_device ), 28406 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( pCreateInfo ), 28407 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 28408 reinterpret_cast<VkIndirectCommandsLayoutEXT *>( pIndirectCommandsLayout ) ) ); 28409 } 28410 28411 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28412 template <typename Dispatch> 28413 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT>::type createIndirectCommandsLayoutEXT(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28414 Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, 28415 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28416 Dispatch const & d ) const 28417 { 28418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28419 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28420 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function <vkCreateIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); 28421 # endif 28422 28423 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; 28424 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutEXT( 28425 m_device, 28426 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( &createInfo ), 28427 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 28428 reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ) ) ); 28429 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); 28430 28431 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
indirectCommandsLayout ) ); 28432 } 28433 28434 # ifndef VULKAN_HPP_NO_SMART_HANDLE 28435 template <typename Dispatch> 28436 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT, Dispatch>>::type createIndirectCommandsLayoutEXTUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28437 Device::createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, 28438 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28439 Dispatch const & d ) const 28440 { 28441 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28442 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28443 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function <vkCreateIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); 28444 # endif 28445 28446 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; 28447 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutEXT( 28448 m_device, 28449 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( &createInfo ), 28450 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 28451 reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ) ) ); 28452 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); 28453 28454 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 28455 UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT, Dispatch>( 28456 indirectCommandsLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 28457 } 28458 # endif /* VULKAN_HPP_NO_SMART_HANDLE 
*/ 28459 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28460 28461 template <typename Dispatch> destroyIndirectCommandsLayoutEXT(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const28462 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, 28463 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28464 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28465 { 28466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28467 d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), 28468 static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), 28469 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 28470 } 28471 28472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28473 template <typename Dispatch> destroyIndirectCommandsLayoutEXT(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28474 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, 28475 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28476 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28477 { 28478 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28479 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28480 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function <vkDestroyIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); 28481 # endif 28482 28483 d.vkDestroyIndirectCommandsLayoutEXT( 28484 m_device, 28485 static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), 28486 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) 
); 28487 } 28488 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28489 28490 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const28491 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, 28492 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28493 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28494 { 28495 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28496 d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), 28497 static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), 28498 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 28499 } 28500 28501 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28502 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28503 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, 28504 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28505 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28506 { 28507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28508 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28509 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function <vkDestroyIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); 28510 # endif 28511 28512 d.vkDestroyIndirectCommandsLayoutEXT( 28513 m_device, 28514 static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), 28515 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 28516 } 28517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28518 28519 template 
<typename Dispatch> 28520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectExecutionSetEXT(const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet,Dispatch const & d) const28521 Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, 28522 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28523 VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, 28524 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28525 { 28526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28527 return static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), 28528 reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( pCreateInfo ), 28529 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 28530 reinterpret_cast<VkIndirectExecutionSetEXT *>( pIndirectExecutionSet ) ) ); 28531 } 28532 28533 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28534 template <typename Dispatch> 28535 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT>::type createIndirectExecutionSetEXT(const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28536 Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, 28537 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28538 Dispatch const & d ) const 28539 { 28540 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28541 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28542 VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function <vkCreateIndirectExecutionSetEXT> requires 
<VK_EXT_device_generated_commands>" ); 28543 # endif 28544 28545 VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; 28546 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectExecutionSetEXT( 28547 m_device, 28548 reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( &createInfo ), 28549 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 28550 reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ) ) ); 28551 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); 28552 28553 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectExecutionSet ) ); 28554 } 28555 28556 # ifndef VULKAN_HPP_NO_SMART_HANDLE 28557 template <typename Dispatch> 28558 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT, Dispatch>>::type createIndirectExecutionSetEXTUnique(const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28559 Device::createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, 28560 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28561 Dispatch const & d ) const 28562 { 28563 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28564 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28565 VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function <vkCreateIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); 28566 # endif 28567 28568 VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; 28569 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectExecutionSetEXT( 
28570 m_device, 28571 reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( &createInfo ), 28572 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 28573 reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ) ) ); 28574 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); 28575 28576 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 28577 UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT, Dispatch>( 28578 indirectExecutionSet, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 28579 } 28580 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 28581 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28582 28583 template <typename Dispatch> destroyIndirectExecutionSetEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const28584 VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28585 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28586 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28587 { 28588 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28589 d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), 28590 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28591 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 28592 } 28593 28594 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28595 template <typename Dispatch> destroyIndirectExecutionSetEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28596 VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT 
indirectExecutionSet, 28597 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28599 { 28600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28601 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28602 VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function <vkDestroyIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); 28603 # endif 28604 28605 d.vkDestroyIndirectExecutionSetEXT( 28606 m_device, 28607 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28608 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 28609 } 28610 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28611 28612 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const28613 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28614 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 28615 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28616 { 28617 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28618 d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), 28619 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28620 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 28621 } 28622 28623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28624 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const28625 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28626 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 28627 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 28628 { 28629 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28631 VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function <vkDestroyIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); 28632 # endif 28633 28634 d.vkDestroyIndirectExecutionSetEXT( 28635 m_device, 28636 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28637 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 28638 } 28639 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28640 28641 template <typename Dispatch> updateIndirectExecutionSetPipelineEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,uint32_t executionSetWriteCount,const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites,Dispatch const & d) const28642 VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28643 uint32_t executionSetWriteCount, 28644 const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, 28645 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28646 { 28647 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28648 d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast<VkDevice>( m_device ), 28649 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28650 executionSetWriteCount, 28651 reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( pExecutionSetWrites ) ); 28652 } 28653 28654 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28655 template <typename Dispatch> updateIndirectExecutionSetPipelineEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT> const & executionSetWrites,Dispatch const & d) const28656 
VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( 28657 VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28658 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT> const & executionSetWrites, 28659 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28660 { 28661 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28662 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28663 VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && 28664 "Function <vkUpdateIndirectExecutionSetPipelineEXT> requires <VK_EXT_device_generated_commands>" ); 28665 # endif 28666 28667 d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, 28668 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28669 executionSetWrites.size(), 28670 reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( executionSetWrites.data() ) ); 28671 } 28672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28673 28674 template <typename Dispatch> updateIndirectExecutionSetShaderEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,uint32_t executionSetWriteCount,const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites,Dispatch const & d) const28675 VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28676 uint32_t executionSetWriteCount, 28677 const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, 28678 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28679 { 28680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28681 d.vkUpdateIndirectExecutionSetShaderEXT( static_cast<VkDevice>( m_device ), 28682 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28683 executionSetWriteCount, 28684 reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( pExecutionSetWrites ) ); 28685 } 28686 28687 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 28688 template <typename Dispatch> updateIndirectExecutionSetShaderEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT> const & executionSetWrites,Dispatch const & d) const28689 VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( 28690 VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, 28691 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT> const & executionSetWrites, 28692 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28693 { 28694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28695 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28696 VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && 28697 "Function <vkUpdateIndirectExecutionSetShaderEXT> requires <VK_EXT_device_generated_commands>" ); 28698 # endif 28699 28700 d.vkUpdateIndirectExecutionSetShaderEXT( m_device, 28701 static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 28702 executionSetWrites.size(), 28703 reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( executionSetWrites.data() ) ); 28704 } 28705 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28706 28707 //=== VK_NV_cooperative_matrix2 === 28708 28709 template <typename Dispatch> 28710 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getCooperativeMatrixFlexibleDimensionsPropertiesNV(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties,Dispatch const & d) const28711 PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, 28712 VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, 28713 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 28714 { 28715 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28716 return static_cast<Result>( 
d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( 28717 static_cast<VkPhysicalDevice>( m_physicalDevice ), 28718 pPropertyCount, 28719 reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( pProperties ) ) ); 28720 } 28721 28722 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 28723 template <typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, 28724 typename Dispatch, 28725 typename std::enable_if<std::is_same<typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator::value_type, 28726 VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV>::value, 28727 int>::type> 28728 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 28729 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator>>::type getCooperativeMatrixFlexibleDimensionsPropertiesNV(Dispatch const & d) const28730 PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const 28731 { 28732 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28734 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && 28735 "Function <vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV> requires <VK_NV_cooperative_matrix2>" ); 28736 # endif 28737 28738 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator> properties; 28739 uint32_t propertyCount; 28740 VULKAN_HPP_NAMESPACE::Result result; 28741 do 28742 { 28743 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 28744 d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 28745 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 28746 { 28747 properties.resize( propertyCount ); 28748 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( 28749 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( properties.data() ) ) ); 28750 } 28751 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 28752 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); 28753 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 28754 if ( propertyCount < properties.size() ) 28755 { 28756 properties.resize( propertyCount ); 28757 } 28758 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 28759 } 28760 28761 template <typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, 28762 typename Dispatch, 28763 typename std::enable_if<std::is_same<typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator::value_type, 28764 VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV>::value, 28765 int>::type> 28766 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 28767 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator>>::type getCooperativeMatrixFlexibleDimensionsPropertiesNV(CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator,Dispatch const & d) const28768 PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( 28769 CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const 28770 { 28771 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 28772 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 28773 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && 28774 "Function 
<vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV> requires <VK_NV_cooperative_matrix2>" ); 28775 # endif 28776 28777 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator> properties( 28778 cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); 28779 uint32_t propertyCount; 28780 VULKAN_HPP_NAMESPACE::Result result; 28781 do 28782 { 28783 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 28784 d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 28785 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 28786 { 28787 properties.resize( propertyCount ); 28788 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( 28789 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( properties.data() ) ) ); 28790 } 28791 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 28792 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); 28793 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 28794 if ( propertyCount < properties.size() ) 28795 { 28796 properties.resize( propertyCount ); 28797 } 28798 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 28799 } 28800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 28801 28802 } // namespace VULKAN_HPP_NAMESPACE 28803 #endif 28804