1 // Copyright 2015-2024 The Khronos Group Inc. 2 // 3 // SPDX-License-Identifier: Apache-2.0 OR MIT 4 // 5 6 // This header is generated from the Khronos Vulkan XML API Registry. 7 8 #ifndef VULKAN_FUNCS_HPP 9 #define VULKAN_FUNCS_HPP 10 11 namespace VULKAN_HPP_NAMESPACE 12 { 13 14 //=========================== 15 //=== COMMAND Definitions === 16 //=========================== 17 18 //=== VK_VERSION_1_0 === 19 20 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Instance * pInstance,Dispatch const & d)21 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, 22 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23 VULKAN_HPP_NAMESPACE::Instance * pInstance, 24 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 25 { 26 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), 28 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 29 reinterpret_cast<VkInstance *>( pInstance ) ) ); 30 } 31 32 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 33 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)34 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( 35 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 36 { 37 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 38 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 39 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 40 # endif 41 
42 VULKAN_HPP_NAMESPACE::Instance instance; 43 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 44 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 45 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 46 reinterpret_cast<VkInstance *>( &instance ) ) ); 47 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); 48 49 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); 50 } 51 52 # ifndef VULKAN_HPP_NO_SMART_HANDLE 53 template <typename Dispatch> createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)54 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( 55 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 56 { 57 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 58 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 59 VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" ); 60 # endif 61 62 VULKAN_HPP_NAMESPACE::Instance instance; 63 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 64 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 65 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 66 reinterpret_cast<VkInstance *>( &instance ) ) ); 67 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); 68 69 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 70 result, 
UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 71 } 72 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 73 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 74 75 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const76 VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 77 { 78 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 79 d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 80 } 81 82 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 83 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const84 VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 85 { 86 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 87 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 88 VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function <vkDestroyInstance> requires <VK_VERSION_1_0>" ); 89 # endif 90 91 d.vkDestroyInstance( m_instance, 92 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 93 } 94 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 95 96 template <typename Dispatch> enumeratePhysicalDevices(uint32_t * pPhysicalDeviceCount,VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,Dispatch const & d) const97 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, 98 VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, 99 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 100 { 101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 102 return static_cast<Result>( 
d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); 103 } 104 105 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 106 template <typename PhysicalDeviceAllocator, typename Dispatch> 107 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(Dispatch const & d) const108 Instance::enumeratePhysicalDevices( Dispatch const & d ) const 109 { 110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 111 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 112 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 113 # endif 114 115 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; 116 uint32_t physicalDeviceCount; 117 VULKAN_HPP_NAMESPACE::Result result; 118 do 119 { 120 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 121 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 122 { 123 physicalDevices.resize( physicalDeviceCount ); 124 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 125 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 126 } 127 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 128 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 129 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 130 if ( physicalDeviceCount < physicalDevices.size() ) 131 { 132 physicalDevices.resize( physicalDeviceCount ); 133 } 134 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 135 } 136 137 template <typename PhysicalDeviceAllocator, 138 
typename Dispatch, 139 typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type> 140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(PhysicalDeviceAllocator & physicalDeviceAllocator,Dispatch const & d) const141 Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const 142 { 143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 144 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 145 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" ); 146 # endif 147 148 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); 149 uint32_t physicalDeviceCount; 150 VULKAN_HPP_NAMESPACE::Result result; 151 do 152 { 153 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 154 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 155 { 156 physicalDevices.resize( physicalDeviceCount ); 157 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 158 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 159 } 160 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 161 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 162 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 163 if ( physicalDeviceCount < physicalDevices.size() ) 164 { 165 physicalDevices.resize( physicalDeviceCount ); 166 } 167 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); 168 } 169 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 170 171 template <typename Dispatch> getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,Dispatch const & d) const172 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 173 { 174 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 175 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); 176 } 177 178 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 179 template <typename Dispatch> 180 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const & d) const181 PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 182 { 183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 184 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 185 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" ); 186 # endif 187 188 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; 189 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); 190 191 return features; 192 } 193 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 194 195 template <typename Dispatch> getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,Dispatch const & d) const196 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 197 VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, 198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 199 { 200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 201 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); 202 } 203 204 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 205 template <typename Dispatch> 206 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const207 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 208 { 209 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 210 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 211 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" ); 212 # endif 213 214 VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; 215 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); 216 217 return formatProperties; 218 } 219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 220 221 template <typename Dispatch> getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,Dispatch const & d) const222 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 223 VULKAN_HPP_NAMESPACE::ImageType type, 224 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 225 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 226 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 227 VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, 228 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 229 { 230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 231 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 232 static_cast<VkFormat>( format ), 233 static_cast<VkImageType>( type ), 234 
static_cast<VkImageTiling>( tiling ), 235 static_cast<VkImageUsageFlags>( usage ), 236 static_cast<VkImageCreateFlags>( flags ), 237 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); 238 } 239 240 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 241 template <typename Dispatch> 242 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,Dispatch const & d) const243 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 244 VULKAN_HPP_NAMESPACE::ImageType type, 245 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 246 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 247 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 248 Dispatch const & d ) const 249 { 250 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 251 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 252 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" ); 253 # endif 254 255 VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; 256 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 257 d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 258 static_cast<VkFormat>( format ), 259 static_cast<VkImageType>( type ), 260 static_cast<VkImageTiling>( tiling ), 261 static_cast<VkImageUsageFlags>( usage ), 262 static_cast<VkImageCreateFlags>( flags ), 263 reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); 264 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); 265 266 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
imageFormatProperties ) ); 267 } 268 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 269 270 template <typename Dispatch> getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,Dispatch const & d) const271 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, 272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 273 { 274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 275 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); 276 } 277 278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 279 template <typename Dispatch> 280 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const & d) const281 PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 282 { 283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 284 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 285 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" ); 286 # endif 287 288 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; 289 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); 290 291 return properties; 292 } 293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 294 295 template <typename Dispatch> getQueueFamilyProperties(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,Dispatch const & d) const296 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, 297 VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, 298 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 299 { 300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 301 d.vkGetPhysicalDeviceQueueFamilyProperties( 302 m_physicalDevice, 
pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); 303 } 304 305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 306 template <typename QueueFamilyPropertiesAllocator, typename Dispatch> 307 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(Dispatch const & d) const308 PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const 309 { 310 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 311 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 312 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 313 # endif 314 315 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; 316 uint32_t queueFamilyPropertyCount; 317 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 318 queueFamilyProperties.resize( queueFamilyPropertyCount ); 319 d.vkGetPhysicalDeviceQueueFamilyProperties( 320 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 321 322 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 323 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 324 { 325 queueFamilyProperties.resize( queueFamilyPropertyCount ); 326 } 327 return queueFamilyProperties; 328 } 329 330 template < 331 typename QueueFamilyPropertiesAllocator, 332 typename Dispatch, 333 typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type> 334 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(QueueFamilyPropertiesAllocator & 
queueFamilyPropertiesAllocator,Dispatch const & d) const335 PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const 336 { 337 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 338 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 339 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); 340 # endif 341 342 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); 343 uint32_t queueFamilyPropertyCount; 344 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 345 queueFamilyProperties.resize( queueFamilyPropertyCount ); 346 d.vkGetPhysicalDeviceQueueFamilyProperties( 347 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 348 349 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 350 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 351 { 352 queueFamilyProperties.resize( queueFamilyPropertyCount ); 353 } 354 return queueFamilyProperties; 355 } 356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 357 358 template <typename Dispatch> getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,Dispatch const & d) const359 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, 360 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 361 { 362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 363 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); 364 } 365 366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 367 template <typename Dispatch> 368 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const & d) const369 PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 370 { 371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 372 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 373 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" ); 374 # endif 375 376 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; 377 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); 378 379 return memoryProperties; 380 } 381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 382 383 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const384 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 385 { 386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 387 return d.vkGetInstanceProcAddr( m_instance, pName ); 388 } 389 390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 391 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const392 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 393 { 394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 395 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 396 VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" ); 397 # endif 398 399 PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); 400 401 return result; 402 } 403 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 404 405 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const406 VULKAN_HPP_INLINE 
PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 407 { 408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 409 return d.vkGetDeviceProcAddr( m_device, pName ); 410 } 411 412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 413 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const414 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 415 { 416 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 417 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 418 VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" ); 419 # endif 420 421 PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); 422 423 return result; 424 } 425 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 426 427 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Device * pDevice,Dispatch const & d) const428 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, 429 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 430 VULKAN_HPP_NAMESPACE::Device * pDevice, 431 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 432 { 433 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 434 return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, 435 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), 436 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 437 reinterpret_cast<VkDevice *>( pDevice ) ) ); 438 } 439 440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 441 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const442 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( 443 const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 444 { 445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 447 VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 448 # endif 449 450 VULKAN_HPP_NAMESPACE::Device device; 451 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 452 d.vkCreateDevice( m_physicalDevice, 453 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 454 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 455 reinterpret_cast<VkDevice *>( &device ) ) ); 456 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); 457 458 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); 459 } 460 461 # ifndef VULKAN_HPP_NO_SMART_HANDLE 462 template <typename Dispatch> 463 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const464 PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, 465 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 466 Dispatch const & d ) const 467 { 468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 469 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 470 VULKAN_HPP_ASSERT( d.vkCreateDevice 
&& "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); 471 # endif 472 473 VULKAN_HPP_NAMESPACE::Device device; 474 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 475 d.vkCreateDevice( m_physicalDevice, 476 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 477 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 478 reinterpret_cast<VkDevice *>( &device ) ) ); 479 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); 480 481 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 482 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 483 } 484 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 485 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 486 487 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const488 VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 489 { 490 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 491 d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 492 } 493 494 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 495 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const496 VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 497 { 498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 499 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 500 VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function <vkDestroyDevice> requires <VK_VERSION_1_0>" ); 501 # endif 502 503 d.vkDestroyDevice( m_device, 
// NOTE(review): vulkan.hpp generated code (Khronos Vulkan-Hpp, VK_VERSION_1_0 command
// definitions), flattened by an indexing pass: original line numbers and ctags-style
// signature stubs are embedded in the text, so the code below is left byte-identical.
// This region contains (by embedded original line number):
//  - tail of an enhanced-mode destroy/free call (through 505) started before this chunk;
//  - enumerateInstanceExtensionProperties, raw-pointer form (508-517): thin noexcept
//    wrapper forwarding to d.vkEnumerateInstanceExtensionProperties;
//  - enumerateInstanceExtensionProperties, enhanced form (519-550): classic two-call
//    pattern — query count with a null array, resize a std::vector, fetch, retried
//    while VK_INCOMPLETE (eIncomplete); result is checked via detail::resultCheck and
//    the vector shrunk if the second count came back smaller;
//  - same function with an explicit allocator parameter (552-587), identical loop but
//    the vector is constructed from extensionPropertiesAllocator;
//  - PhysicalDevice::enumerateDeviceExtensionProperties, raw-pointer form (590-599),
//    dispatching on the wrapped m_physicalDevice handle;
//  - start of the enhanced device-extension variant (601-), continued on the next lines.
504 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 505 } 506 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 507 508 template <typename Dispatch> enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d)509 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, 510 uint32_t * pPropertyCount, 511 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 512 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 513 { 514 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 515 return static_cast<Result>( 516 d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 517 } 518 519 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 520 template <typename ExtensionPropertiesAllocator, typename Dispatch> 521 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d)522 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) 523 { 524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 525 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 526 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); 527 # endif 528 529 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 530 uint32_t propertyCount; 531 VULKAN_HPP_NAMESPACE::Result result; 532 do 533 { 534 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 535 d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 536 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 537 { 538 properties.resize( propertyCount ); 539 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 540 layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 541 } 542 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 543 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 544 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 545 if ( propertyCount < properties.size() ) 546 { 547 properties.resize( propertyCount ); 548 } 549 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 550 } 551 552 template < 553 typename ExtensionPropertiesAllocator, 554 typename Dispatch, 555 typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 556 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d)557 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, 558 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 559 Dispatch const & d ) 560 { 561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 562 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 563 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); 564 # endif 565 566 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( 
extensionPropertiesAllocator ); 567 uint32_t propertyCount; 568 VULKAN_HPP_NAMESPACE::Result result; 569 do 570 { 571 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 572 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 573 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 574 { 575 properties.resize( propertyCount ); 576 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 577 layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 578 } 579 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 580 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 581 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 582 if ( propertyCount < properties.size() ) 583 { 584 properties.resize( propertyCount ); 585 } 586 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 587 } 588 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 589 590 template <typename Dispatch> enumerateDeviceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d) const591 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, 592 uint32_t * pPropertyCount, 593 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 594 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 595 { 596 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 597 return static_cast<Result>( 598 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 599 } 600 601 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 602 template <typename ExtensionPropertiesAllocator, 
// NOTE(review): generated Vulkan-Hpp code, left byte-identical (flattened text with
// embedded original line numbers). Contents by embedded line number:
//  - PhysicalDevice::enumerateDeviceExtensionProperties, enhanced form (603-632):
//    two-call count/fetch loop against d.vkEnumerateDeviceExtensionProperties on the
//    wrapped m_physicalDevice handle, retried while eIncomplete, result-checked, and
//    the vector trimmed if fewer entries were returned than reserved;
//  - same function taking an explicit ExtensionPropertiesAllocator (634-669), SFINAE-
//    constrained so the allocator's value_type matches ExtensionProperties;
//  - enumerateInstanceLayerProperties, raw-pointer noexcept form (672-679);
//  - start of the enhanced enumerateInstanceLayerProperties (681-), continued below.
typename Dispatch> 603 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d) const604 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const 605 { 606 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 607 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 608 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); 609 # endif 610 611 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 612 uint32_t propertyCount; 613 VULKAN_HPP_NAMESPACE::Result result; 614 do 615 { 616 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 617 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 618 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 619 { 620 properties.resize( propertyCount ); 621 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 622 m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 623 } 624 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 625 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 626 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 627 if ( propertyCount < properties.size() ) 628 { 629 properties.resize( propertyCount ); 630 } 631 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 632 } 633 634 template < 635 typename ExtensionPropertiesAllocator, 636 typename Dispatch, 637 typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 638 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d) const639 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, 640 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 641 Dispatch const & d ) const 642 { 643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 644 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 645 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); 646 # endif 647 648 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 649 uint32_t propertyCount; 650 VULKAN_HPP_NAMESPACE::Result result; 651 do 652 { 653 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 654 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 655 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 656 { 657 properties.resize( propertyCount ); 658 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 659 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 660 } 661 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 662 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 663 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 664 if ( propertyCount < properties.size() ) 665 { 666 properties.resize( propertyCount ); 667 } 668 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 669 } 670 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 671 672 template <typename Dispatch> enumerateInstanceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d)673 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, 674 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 675 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 676 { 677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 678 return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 679 } 680 681 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 682 template <typename LayerPropertiesAllocator, typename Dispatch> 683 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(Dispatch const & d)684 enumerateInstanceLayerProperties( Dispatch const & d ) 685 { 686 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
// NOTE(review): generated Vulkan-Hpp code, left byte-identical (flattened text with
// embedded original line numbers). Contents by embedded line number:
//  - enumerateInstanceLayerProperties, enhanced form (683-711) and explicit-allocator
//    form (713-744): two-call count/fetch loop over d.vkEnumerateInstanceLayerProperties,
//    retried while eIncomplete, checked via detail::resultCheck, vector trimmed to the
//    final count;
//  - PhysicalDevice::enumerateDeviceLayerProperties, raw-pointer noexcept form
//    (747-754) and enhanced/explicit-allocator forms (756-819), same pattern on the
//    wrapped m_physicalDevice handle;
//  - start of Device::getQueue raw-pointer form (822-), continued on the next lines.
VK_HEADER_VERSION ); 687 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 688 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); 689 # endif 690 691 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 692 uint32_t propertyCount; 693 VULKAN_HPP_NAMESPACE::Result result; 694 do 695 { 696 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 697 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 698 { 699 properties.resize( propertyCount ); 700 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 701 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 702 } 703 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 704 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 705 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 706 if ( propertyCount < properties.size() ) 707 { 708 properties.resize( propertyCount ); 709 } 710 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 711 } 712 713 template <typename LayerPropertiesAllocator, 714 typename Dispatch, 715 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 716 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d)717 enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) 718 { 719 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 720 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 721 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); 722 # endif 723 724 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 725 uint32_t propertyCount; 726 VULKAN_HPP_NAMESPACE::Result result; 727 do 728 { 729 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 730 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 731 { 732 properties.resize( propertyCount ); 733 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 734 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 735 } 736 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 737 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 738 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 739 if ( propertyCount < properties.size() ) 740 { 741 properties.resize( propertyCount ); 742 } 743 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 744 } 745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 746 747 template <typename Dispatch> enumerateDeviceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d) const748 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, 749 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 750 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 751 { 752 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 753 return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 754 } 755 756 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 757 template <typename LayerPropertiesAllocator, typename Dispatch> 758 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(Dispatch const & d) const759 PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const 760 { 761 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 762 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 763 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); 764 # endif 765 766 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 767 uint32_t propertyCount; 768 VULKAN_HPP_NAMESPACE::Result result; 769 do 770 { 771 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 772 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 773 { 774 properties.resize( propertyCount ); 775 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 776 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 777 } 778 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 779 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 780 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 781 if ( propertyCount < properties.size() ) 782 { 783 properties.resize( propertyCount ); 784 } 785 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 786 } 787 788 template <typename LayerPropertiesAllocator, 789 typename Dispatch, 790 typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, 
int>::type> 791 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d) const792 PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const 793 { 794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 796 VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); 797 # endif 798 799 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 800 uint32_t propertyCount; 801 VULKAN_HPP_NAMESPACE::Result result; 802 do 803 { 804 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 805 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 806 { 807 properties.resize( propertyCount ); 808 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 809 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 810 } 811 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 812 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 813 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 814 if ( propertyCount < properties.size() ) 815 { 816 properties.resize( propertyCount ); 817 } 818 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 819 } 820 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 821 822 template <typename Dispatch> 823 VULKAN_HPP_INLINE void getQueue(uint32_t queueFamilyIndex,uint32_t 
// NOTE(review): generated Vulkan-Hpp code, left byte-identical (flattened text with
// embedded original line numbers). Contents by embedded line number:
//  - Device::getQueue, raw-pointer noexcept form (824-828) and enhanced form (830-844)
//    returning a Queue by value via vkGetDeviceQueue;
//  - Queue::submit, raw-pointer noexcept form (847-855) and enhanced ArrayProxy form
//    (857-872): forwards the proxy's size()/data() to vkQueueSubmit and throws/returns
//    per detail::resultCheck / createResultValueType;
//  - Queue::waitIdle (875-896): the #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE branch
//    returns the raw Result, the #else branch result-checks vkQueueWaitIdle;
//  - Device::waitIdle (898-), same two-branch pattern over vkDeviceWaitIdle, with the
//    start of Device::allocateMemory (921-) continued on the next lines.
queueIndex,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const824 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 825 { 826 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 827 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); 828 } 829 830 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 831 template <typename Dispatch> 832 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,Dispatch const & d) const833 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 834 { 835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 836 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 837 VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function <vkGetDeviceQueue> requires <VK_VERSION_1_0>" ); 838 # endif 839 840 VULKAN_HPP_NAMESPACE::Queue queue; 841 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); 842 843 return queue; 844 } 845 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 846 847 template <typename Dispatch> submit(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const848 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, 849 const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, 850 VULKAN_HPP_NAMESPACE::Fence fence, 851 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 852 { 853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 854 return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 855 } 856 857 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 858 template <typename Dispatch> 
submit(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const859 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( 860 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 861 { 862 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 863 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 864 VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" ); 865 # endif 866 867 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 868 d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 869 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); 870 871 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 872 } 873 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 874 875 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 876 template <typename Dispatch> waitIdle(Dispatch const & d) const877 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 878 { 879 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 880 return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); 881 } 882 #else 883 template <typename Dispatch> waitIdle(Dispatch const & d) const884 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const 885 { 886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 887 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 888 VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" ); 889 # endif 890 891 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueueWaitIdle( m_queue ) ); 892 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); 893 894 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 895 } 896 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 897 898 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 899 template <typename Dispatch> waitIdle(Dispatch const & d) const900 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 901 { 902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 903 return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); 904 } 905 #else 906 template <typename Dispatch> waitIdle(Dispatch const & d) const907 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const 908 { 909 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 910 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 911 VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" ); 912 # endif 913 914 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeviceWaitIdle( m_device ) ); 915 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); 916 917 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 918 } 919 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 920 921 template <typename Dispatch> allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,Dispatch const & d) const922 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, 923 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
// NOTE(review): generated Vulkan-Hpp code, left byte-identical (flattened text with
// embedded original line numbers). Contents by embedded line number:
//  - Device::allocateMemory, raw-pointer noexcept form (continuing through 932) and
//    enhanced form (934-955) returning a DeviceMemory via vkAllocateMemory with
//    detail::resultCheck;
//  - Device::allocateMemoryUnique (957-980, guarded by VULKAN_HPP_NO_SMART_HANDLE):
//    same call, but wraps the handle in UniqueHandle with an ObjectFree deleter that
//    captures *this, the allocator and the dispatcher;
//  - Device::freeMemory, raw-pointer (983-990) and Optional-allocator (992-1006)
//    forms over vkFreeMemory;
//  - (Device::free) (1009-1033): parenthesized-name aliases of freeMemory — the
//    parentheses presumably guard against a platform `free` macro (TODO confirm);
//  - start of Device::mapMemory raw-pointer form (1035-), continued below.
pAllocator, 924 VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, 925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 926 { 927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 928 return static_cast<Result>( d.vkAllocateMemory( m_device, 929 reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), 930 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 931 reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); 932 } 933 934 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 935 template <typename Dispatch> 936 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const937 Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 938 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 939 Dispatch const & d ) const 940 { 941 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 942 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 943 VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); 944 # endif 945 946 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 947 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 948 d.vkAllocateMemory( m_device, 949 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 950 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 951 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 952 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); 953 954 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); 955 } 956 957 # ifndef VULKAN_HPP_NO_SMART_HANDLE 958 template <typename Dispatch> 959 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const960 Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 961 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 962 Dispatch const & d ) const 963 { 964 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 965 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 966 VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); 967 # endif 968 969 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 970 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 971 d.vkAllocateMemory( m_device, 972 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 973 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 974 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 975 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); 976 977 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 978 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) ); 979 } 980 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 981 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 982 983 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const984 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 985 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 986 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 987 { 988 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 989 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 990 } 991 992 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 993 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const994 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 995 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 996 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 997 { 998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 999 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1000 VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); 1001 # endif 1002 1003 d.vkFreeMemory( m_device, 1004 static_cast<VkDeviceMemory>( memory ), 1005 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1006 } 1007 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1008 1009 template <typename Dispatch> 1010 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1011 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1012 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1013 { 1014 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1015 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1016 } 1017 1018 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1019 template <typename Dispatch> 1020 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1021 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1023 { 1024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1025 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1026 VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); 1027 # endif 1028 1029 d.vkFreeMemory( m_device, 1030 static_cast<VkDeviceMemory>( memory ), 1031 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1032 } 1033 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1034 1035 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,void ** ppData,Dispatch const & d) const1036 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1037 VULKAN_HPP_NAMESPACE::DeviceSize offset, 1038 VULKAN_HPP_NAMESPACE::DeviceSize size, 1039 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 1040 void ** ppData, 1041 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1042 { 1043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1044 return static_cast<Result>( d.vkMapMemory( m_device, 1045 static_cast<VkDeviceMemory>( memory ), 1046 static_cast<VkDeviceSize>( offset ), 1047 static_cast<VkDeviceSize>( size ), 1048 static_cast<VkMemoryMapFlags>( flags ), 1049 ppData ) ); 1050 } 1051 1052 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1053 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,Dispatch const & d) const1054 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1055 VULKAN_HPP_NAMESPACE::DeviceSize offset, 1056 VULKAN_HPP_NAMESPACE::DeviceSize size, 1057 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 1058 Dispatch const & d ) const 1059 { 1060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
// NOTE(review): generated Vulkan-Hpp code, left byte-identical (flattened text with
// embedded original line numbers). Contents by embedded line number:
//  - Device::mapMemory, enhanced form (continuing through 1075): calls vkMapMemory,
//    result-checks, and returns the mapped void* via createResultValueType;
//  - Device::unmapMemory (1078-1083), thin wrapper over vkUnmapMemory;
//  - Device::flushMappedMemoryRanges, raw-pointer (1085-1092) and ArrayProxy enhanced
//    (1094-1110) forms over vkFlushMappedMemoryRanges;
//  - Device::invalidateMappedMemoryRanges, raw-pointer (1113-1121) and ArrayProxy
//    enhanced (1123-1139) forms over vkInvalidateMappedMemoryRanges;
//  - Device::getMemoryCommitment, raw-pointer (1142-1149) and enhanced (1151-1165)
//    forms returning the committed byte count from vkGetDeviceMemoryCommitment;
//  - Device::bindBufferMemory: disabled-enhanced-mode form (1168-1178) is complete
//    here; the enhanced #else form (1180-) continues past this chunk and is untouched.
VK_HEADER_VERSION ); 1061 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1062 VULKAN_HPP_ASSERT( d.vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" ); 1063 # endif 1064 1065 void * pData; 1066 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory( m_device, 1067 static_cast<VkDeviceMemory>( memory ), 1068 static_cast<VkDeviceSize>( offset ), 1069 static_cast<VkDeviceSize>( size ), 1070 static_cast<VkMemoryMapFlags>( flags ), 1071 &pData ) ); 1072 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); 1073 1074 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 1075 } 1076 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1077 1078 template <typename Dispatch> unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1079 VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1080 { 1081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1082 d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) ); 1083 } 1084 1085 template <typename Dispatch> flushMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const1086 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, 1087 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 1088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1089 { 1090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1091 return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 1092 } 1093 1094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1095 template <typename Dispatch> 1096 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType<void>::type flushMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1097 Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1098 Dispatch const & d ) const 1099 { 1100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1101 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1102 VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" ); 1103 # endif 1104 1105 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1106 d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1107 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); 1108 1109 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1110 } 1111 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1112 1113 template <typename Dispatch> invalidateMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const1114 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, 1115 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 1116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1117 { 1118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1119 return static_cast<Result>( 1120 d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 1121 } 1122 1123 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1124 template <typename Dispatch> 1125 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
invalidateMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1126 Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1127 Dispatch const & d ) const 1128 { 1129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1131 VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" ); 1132 # endif 1133 1134 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1135 d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1136 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); 1137 1138 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1139 } 1140 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1141 1142 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,Dispatch const & d) const1143 VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1144 VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, 1145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1146 { 1147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1148 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); 1149 } 1150 1151 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1152 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1153 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize 
Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1155 { 1156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1157 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1158 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" ); 1159 # endif 1160 1161 VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; 1162 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); 1163 1164 return committedMemoryInBytes; 1165 } 1166 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1167 1168 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1169 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1170 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1171 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1172 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1173 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1174 { 1175 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1176 return static_cast<Result>( 1177 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1178 } 1179 #else 1180 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1181 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( 1182 VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 
1183 { 1184 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1185 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1186 VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" ); 1187 # endif 1188 1189 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1190 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1191 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); 1192 1193 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1194 } 1195 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1196 1197 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1198 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1199 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, 1200 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1201 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1202 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1203 { 1204 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1205 return static_cast<Result>( 1206 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1207 } 1208 #else 1209 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1210 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( 1211 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 
Dispatch const & d ) const 1212 { 1213 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1214 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1215 VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" ); 1216 # endif 1217 1218 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1219 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1220 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); 1221 1222 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1223 } 1224 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1225 1226 template <typename Dispatch> getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1227 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1228 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1229 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1230 { 1231 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1232 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1233 } 1234 1235 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1236 template <typename Dispatch> 1237 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,Dispatch const & d) const1238 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1239 { 1240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1241 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1242 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && 
"Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" ); 1243 # endif 1244 1245 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1246 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1247 1248 return memoryRequirements; 1249 } 1250 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1251 1252 template <typename Dispatch> getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1253 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1254 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1255 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1256 { 1257 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1258 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1259 } 1260 1261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1262 template <typename Dispatch> 1263 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1264 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1265 { 1266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1267 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1268 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" ); 1269 # endif 1270 1271 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1272 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1273 1274 return memoryRequirements; 1275 } 1276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ 1277 1278 template <typename Dispatch> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,Dispatch const & d) const1279 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1280 uint32_t * pSparseMemoryRequirementCount, 1281 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, 1282 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1283 { 1284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1285 d.vkGetImageSparseMemoryRequirements( m_device, 1286 static_cast<VkImage>( image ), 1287 pSparseMemoryRequirementCount, 1288 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); 1289 } 1290 1291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1292 template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch> 1293 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1294 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 1295 { 1296 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1297 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1298 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1299 # endif 1300 1301 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; 1302 uint32_t sparseMemoryRequirementCount; 1303 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1304 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1305 
d.vkGetImageSparseMemoryRequirements( m_device, 1306 static_cast<VkImage>( image ), 1307 &sparseMemoryRequirementCount, 1308 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1309 1310 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1311 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1312 { 1313 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1314 } 1315 return sparseMemoryRequirements; 1316 } 1317 1318 template <typename SparseImageMemoryRequirementsAllocator, 1319 typename Dispatch, 1320 typename std::enable_if< 1321 std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, 1322 int>::type> 1323 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,Dispatch const & d) const1324 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1325 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, 1326 Dispatch const & d ) const 1327 { 1328 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1329 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1330 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); 1331 # endif 1332 1333 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( 1334 sparseImageMemoryRequirementsAllocator ); 1335 uint32_t sparseMemoryRequirementCount; 1336 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1337 sparseMemoryRequirements.resize( 
sparseMemoryRequirementCount ); 1338 d.vkGetImageSparseMemoryRequirements( m_device, 1339 static_cast<VkImage>( image ), 1340 &sparseMemoryRequirementCount, 1341 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1342 1343 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1344 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1345 { 1346 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1347 } 1348 return sparseMemoryRequirements; 1349 } 1350 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1351 1352 template <typename Dispatch> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,Dispatch const & d) const1353 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1354 VULKAN_HPP_NAMESPACE::ImageType type, 1355 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1356 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1357 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1358 uint32_t * pPropertyCount, 1359 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, 1360 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1361 { 1362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1363 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1364 static_cast<VkFormat>( format ), 1365 static_cast<VkImageType>( type ), 1366 static_cast<VkSampleCountFlagBits>( samples ), 1367 static_cast<VkImageUsageFlags>( usage ), 1368 static_cast<VkImageTiling>( tiling ), 1369 pPropertyCount, 1370 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); 1371 } 1372 1373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1374 template 
<typename SparseImageFormatPropertiesAllocator, typename Dispatch> 1375 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,Dispatch const & d) const1376 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1377 VULKAN_HPP_NAMESPACE::ImageType type, 1378 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1379 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1380 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1381 Dispatch const & d ) const 1382 { 1383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1384 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1385 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1386 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1387 # endif 1388 1389 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; 1390 uint32_t propertyCount; 1391 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1392 static_cast<VkFormat>( format ), 1393 static_cast<VkImageType>( type ), 1394 static_cast<VkSampleCountFlagBits>( samples ), 1395 static_cast<VkImageUsageFlags>( usage ), 1396 static_cast<VkImageTiling>( tiling ), 1397 &propertyCount, 1398 nullptr ); 1399 properties.resize( propertyCount ); 1400 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1401 static_cast<VkFormat>( format ), 1402 static_cast<VkImageType>( type ), 1403 static_cast<VkSampleCountFlagBits>( samples ), 1404 static_cast<VkImageUsageFlags>( usage ), 1405 static_cast<VkImageTiling>( tiling ), 1406 &propertyCount, 1407 reinterpret_cast<VkSparseImageFormatProperties *>( 
properties.data() ) ); 1408 1409 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1410 if ( propertyCount < properties.size() ) 1411 { 1412 properties.resize( propertyCount ); 1413 } 1414 return properties; 1415 } 1416 1417 template < 1418 typename SparseImageFormatPropertiesAllocator, 1419 typename Dispatch, 1420 typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, 1421 int>::type> 1422 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,Dispatch const & d) const1423 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1424 VULKAN_HPP_NAMESPACE::ImageType type, 1425 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1426 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1427 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1428 SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, 1429 Dispatch const & d ) const 1430 { 1431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1433 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && 1434 "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); 1435 # endif 1436 1437 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); 1438 uint32_t propertyCount; 1439 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1440 static_cast<VkFormat>( format ), 
1441 static_cast<VkImageType>( type ), 1442 static_cast<VkSampleCountFlagBits>( samples ), 1443 static_cast<VkImageUsageFlags>( usage ), 1444 static_cast<VkImageTiling>( tiling ), 1445 &propertyCount, 1446 nullptr ); 1447 properties.resize( propertyCount ); 1448 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1449 static_cast<VkFormat>( format ), 1450 static_cast<VkImageType>( type ), 1451 static_cast<VkSampleCountFlagBits>( samples ), 1452 static_cast<VkImageUsageFlags>( usage ), 1453 static_cast<VkImageTiling>( tiling ), 1454 &propertyCount, 1455 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1456 1457 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1458 if ( propertyCount < properties.size() ) 1459 { 1460 properties.resize( propertyCount ); 1461 } 1462 return properties; 1463 } 1464 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1465 1466 template <typename Dispatch> bindSparse(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1467 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, 1468 const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, 1469 VULKAN_HPP_NAMESPACE::Fence fence, 1470 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1471 { 1472 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1473 return static_cast<Result>( 1474 d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); 1475 } 1476 1477 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1478 template <typename Dispatch> bindSparse(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1479 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( 1480 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1481 { 1482 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1483 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1484 VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" ); 1485 # endif 1486 1487 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1488 d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) ); 1489 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); 1490 1491 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1492 } 1493 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1494 1495 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const1496 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, 1497 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1498 VULKAN_HPP_NAMESPACE::Fence * pFence, 1499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1500 { 1501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1502 return static_cast<Result>( d.vkCreateFence( m_device, 1503 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), 1504 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1505 reinterpret_cast<VkFence *>( pFence ) ) ); 1506 } 1507 1508 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1509 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1510 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( 1511 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1512 { 1513 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1514 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1515 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1516 # endif 1517 1518 VULKAN_HPP_NAMESPACE::Fence fence; 1519 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1520 d.vkCreateFence( m_device, 1521 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1522 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1523 reinterpret_cast<VkFence *>( &fence ) ) ); 1524 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); 1525 1526 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 1527 } 1528 1529 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1530 template <typename Dispatch> createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( 1532 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1533 { 1534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1536 VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); 1537 # endif 1538 1539 VULKAN_HPP_NAMESPACE::Fence 
fence; 1540 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1541 d.vkCreateFence( m_device, 1542 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1543 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1544 reinterpret_cast<VkFence *>( &fence ) ) ); 1545 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); 1546 1547 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1548 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1549 } 1550 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1551 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1552 1553 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1554 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1555 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1557 { 1558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1559 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1560 } 1561 1562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1563 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1564 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1565 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1566 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1567 { 1568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1570 VULKAN_HPP_ASSERT( d.vkDestroyFence && 
"Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1571 # endif 1572 1573 d.vkDestroyFence( m_device, 1574 static_cast<VkFence>( fence ), 1575 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1576 } 1577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1578 1579 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1580 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1581 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1582 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1583 { 1584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1585 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1586 } 1587 1588 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1589 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1590 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1591 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1592 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1593 { 1594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1595 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1596 VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); 1597 # endif 1598 1599 d.vkDestroyFence( m_device, 1600 static_cast<VkFence>( fence ), 1601 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1602 } 1603 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1604 1605 template <typename Dispatch> resetFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,Dispatch const & d) 
const1606 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, 1607 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1608 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1609 { 1610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1611 return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); 1612 } 1613 1614 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1615 template <typename Dispatch> 1616 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,Dispatch const & d) const1617 Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const 1618 { 1619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1620 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1621 VULKAN_HPP_ASSERT( d.vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" ); 1622 # endif 1623 1624 VULKAN_HPP_NAMESPACE::Result result = 1625 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) ); 1626 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); 1627 1628 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1629 } 1630 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1631 1632 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1633 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1634 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1635 { 1636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1637 return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1638 } 1639 
#else 1640 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1641 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1642 { 1643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1644 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1645 VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" ); 1646 # endif 1647 1648 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1649 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1650 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1651 1652 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1653 } 1654 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1655 1656 template <typename Dispatch> waitForFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1657 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, 1658 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1659 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1660 uint64_t timeout, 1661 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1662 { 1663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1664 return static_cast<Result>( 1665 d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); 1666 } 1667 1668 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1669 template <typename Dispatch> 1670 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & 
fences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1671 Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, 1672 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1673 uint64_t timeout, 1674 Dispatch const & d ) const 1675 { 1676 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1677 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1678 VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" ); 1679 # endif 1680 1681 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1682 d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) ); 1683 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1684 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 1685 1686 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1687 } 1688 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1689 1690 template <typename Dispatch> createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,Dispatch const & d) const1691 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, 1692 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1693 VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, 1694 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1695 { 1696 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1697 return static_cast<Result>( d.vkCreateSemaphore( m_device, 1698 reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), 1699 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1700 reinterpret_cast<VkSemaphore *>( pSemaphore ) 
) ); 1701 } 1702 1703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1704 template <typename Dispatch> 1705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1706 Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1707 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1708 Dispatch const & d ) const 1709 { 1710 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1711 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1712 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1713 # endif 1714 1715 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1716 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1717 d.vkCreateSemaphore( m_device, 1718 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1719 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1720 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1721 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); 1722 1723 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); 1724 } 1725 1726 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1727 template <typename Dispatch> 1728 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1729 Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1730 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1731 Dispatch const & d ) const 1732 { 1733 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1734 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1735 VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); 1736 # endif 1737 1738 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1739 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1740 d.vkCreateSemaphore( m_device, 1741 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1742 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1743 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1744 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); 1745 1746 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1747 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1748 } 1749 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1750 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1751 1752 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1753 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1754 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1755 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1756 { 1757 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1758 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1759 } 1760 1761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1762 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1763 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1764 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1765 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1766 { 1767 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1768 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1769 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1770 # endif 1771 1772 d.vkDestroySemaphore( m_device, 1773 static_cast<VkSemaphore>( semaphore ), 1774 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1775 } 1776 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1777 1778 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1779 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1780 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1781 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1782 { 1783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1784 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1785 } 1786 1787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1788 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1789 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1790 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1791 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1792 { 1793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1794 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1795 VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); 1796 # endif 1797 1798 d.vkDestroySemaphore( m_device, 1799 static_cast<VkSemaphore>( semaphore ), 1800 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1801 } 1802 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1803 1804 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Event * pEvent,Dispatch const & d) const1805 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, 1806 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1807 VULKAN_HPP_NAMESPACE::Event * pEvent, 1808 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1809 { 1810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1811 return static_cast<Result>( d.vkCreateEvent( m_device, 1812 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), 1813 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1814 reinterpret_cast<VkEvent *>( pEvent ) ) ); 1815 } 1816 1817 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1818 template <typename Dispatch> createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1819 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( 1820 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1821 { 1822 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1823 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1824 VULKAN_HPP_ASSERT( d.vkCreateEvent && 
"Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1825 # endif 1826 1827 VULKAN_HPP_NAMESPACE::Event event; 1828 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1829 d.vkCreateEvent( m_device, 1830 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1831 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1832 reinterpret_cast<VkEvent *>( &event ) ) ); 1833 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); 1834 1835 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); 1836 } 1837 1838 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1839 template <typename Dispatch> createEventUnique(const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1840 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( 1841 const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1842 { 1843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1844 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1845 VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); 1846 # endif 1847 1848 VULKAN_HPP_NAMESPACE::Event event; 1849 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1850 d.vkCreateEvent( m_device, 1851 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1852 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1853 reinterpret_cast<VkEvent *>( &event ) ) ); 1854 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createEventUnique" ); 1855 1856 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 1857 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1858 } 1859 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1861 1862 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1863 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1864 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1865 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1866 { 1867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1868 d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1869 } 1870 1871 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1872 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1873 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1874 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1876 { 1877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1879 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1880 # endif 1881 1882 d.vkDestroyEvent( m_device, 1883 static_cast<VkEvent>( event ), 1884 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1885 } 1886 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1887 1888 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1889 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1890 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1891 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1892 { 1893 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1894 d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1895 } 1896 1897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1898 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1899 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1900 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1901 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1902 { 1903 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1904 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1905 VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); 1906 # endif 1907 1908 d.vkDestroyEvent( m_device, 1909 static_cast<VkEvent>( event ), 1910 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1911 } 1912 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1913 1914 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1915 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1916 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1917 { 1918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1919 return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1920 } 1921 #else 1922 template <typename Dispatch> 
getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1923 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1924 { 1925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1926 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1927 VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" ); 1928 # endif 1929 1930 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1931 VULKAN_HPP_NAMESPACE::detail::resultCheck( 1932 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); 1933 1934 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1935 } 1936 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1937 1938 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1939 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1940 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1941 { 1942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1943 return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1944 } 1945 #else 1946 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1947 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, 1948 Dispatch const & d ) const 1949 { 1950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1951 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1952 VULKAN_HPP_ASSERT( d.vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" ); 1953 # endif 1954 1955 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1956 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); 1957 1958 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1959 } 1960 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1961 1962 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1963 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1964 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1965 { 1966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1967 return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1968 } 1969 #else 1970 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1971 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1972 { 1973 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1974 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 1975 VULKAN_HPP_ASSERT( d.vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" ); 1976 # endif 1977 1978 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1979 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); 1980 1981 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 1982 } 1983 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1984 1985 template <typename Dispatch> createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,Dispatch 
const & d) const1986 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, 1987 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1988 VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, 1989 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1990 { 1991 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1992 return static_cast<Result>( d.vkCreateQueryPool( m_device, 1993 reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), 1994 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1995 reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); 1996 } 1997 1998 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1999 template <typename Dispatch> 2000 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2001 Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2002 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2003 Dispatch const & d ) const 2004 { 2005 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2006 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2007 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2008 # endif 2009 2010 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2011 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2012 d.vkCreateQueryPool( m_device, 2013 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2014 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2015 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2016 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createQueryPool" ); 2017 2018 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); 2019 } 2020 2021 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2022 template <typename Dispatch> 2023 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2024 Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, 2025 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2026 Dispatch const & d ) const 2027 { 2028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2029 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2030 VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); 2031 # endif 2032 2033 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 2034 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2035 d.vkCreateQueryPool( m_device, 2036 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 2037 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2038 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 2039 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); 2040 2041 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2042 result, UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2043 } 2044 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2045 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2046 2047 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2048 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2049 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2051 { 2052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2053 d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2054 } 2055 2056 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2057 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2058 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2059 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2060 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2061 { 2062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2063 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2064 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2065 # endif 2066 2067 d.vkDestroyQueryPool( m_device, 2068 static_cast<VkQueryPool>( queryPool ), 2069 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2070 } 2071 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2072 2073 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2074 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2075 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2076 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2077 { 2078 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2079 
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2080 } 2081 2082 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2083 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2084 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2085 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2086 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2087 { 2088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2089 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2090 VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); 2091 # endif 2092 2093 d.vkDestroyQueryPool( m_device, 2094 static_cast<VkQueryPool>( queryPool ), 2095 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2096 } 2097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2098 2099 template <typename Dispatch> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2101 uint32_t firstQuery, 2102 uint32_t queryCount, 2103 size_t dataSize, 2104 void * pData, 2105 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2106 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2107 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2108 { 2109 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2110 return static_cast<Result>( d.vkGetQueryPoolResults( m_device, 2111 static_cast<VkQueryPool>( queryPool ), 2112 firstQuery, 2113 queryCount, 2114 
dataSize, 2115 pData, 2116 static_cast<VkDeviceSize>( stride ), 2117 static_cast<VkQueryResultFlags>( flags ) ) ); 2118 } 2119 2120 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2121 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 2122 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2123 Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2124 uint32_t firstQuery, 2125 uint32_t queryCount, 2126 size_t dataSize, 2127 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2128 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2129 Dispatch const & d ) const 2130 { 2131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2132 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2133 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2134 # endif 2135 2136 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 2137 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 2138 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2139 static_cast<VkQueryPool>( queryPool ), 2140 firstQuery, 2141 queryCount, 2142 data.size() * sizeof( DataType ), 2143 reinterpret_cast<void *>( data.data() ), 2144 static_cast<VkDeviceSize>( stride ), 2145 static_cast<VkQueryResultFlags>( flags ) ) ); 2146 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 2147 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", 2148 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2149 2150 return ResultValue<std::vector<DataType, DataTypeAllocator>>( result, std::move( data ) ); 2151 } 2152 2153 template <typename 
DataType, typename Dispatch> getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const2154 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 2155 uint32_t firstQuery, 2156 uint32_t queryCount, 2157 VULKAN_HPP_NAMESPACE::DeviceSize stride, 2158 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 2159 Dispatch const & d ) const 2160 { 2161 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2162 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2163 VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); 2164 # endif 2165 2166 DataType data; 2167 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device, 2168 static_cast<VkQueryPool>( queryPool ), 2169 firstQuery, 2170 queryCount, 2171 sizeof( DataType ), 2172 reinterpret_cast<void *>( &data ), 2173 static_cast<VkDeviceSize>( stride ), 2174 static_cast<VkQueryResultFlags>( flags ) ) ); 2175 VULKAN_HPP_NAMESPACE::detail::resultCheck( 2176 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 2177 2178 return ResultValue<DataType>( result, std::move( data ) ); 2179 } 2180 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2181 2182 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Buffer * pBuffer,Dispatch const & d) const2183 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, 2184 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2185 
VULKAN_HPP_NAMESPACE::Buffer * pBuffer, 2186 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2187 { 2188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2189 return static_cast<Result>( d.vkCreateBuffer( m_device, 2190 reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), 2191 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2192 reinterpret_cast<VkBuffer *>( pBuffer ) ) ); 2193 } 2194 2195 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2196 template <typename Dispatch> createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2197 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( 2198 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2199 { 2200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2201 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2202 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2203 # endif 2204 2205 VULKAN_HPP_NAMESPACE::Buffer buffer; 2206 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2207 d.vkCreateBuffer( m_device, 2208 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2209 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2210 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2211 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); 2212 2213 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 2214 } 2215 2216 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2217 template <typename Dispatch> createBufferUnique(const VULKAN_HPP_NAMESPACE::BufferCreateInfo & 
createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2218 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( 2219 const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2220 { 2221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2223 VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); 2224 # endif 2225 2226 VULKAN_HPP_NAMESPACE::Buffer buffer; 2227 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2228 d.vkCreateBuffer( m_device, 2229 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 2230 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2231 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 2232 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); 2233 2234 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2235 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2236 } 2237 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2238 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2239 2240 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2241 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2242 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2243 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2244 { 2245 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2246 d.vkDestroyBuffer( 
m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2247 } 2248 2249 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2250 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2251 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 2252 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2253 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2254 { 2255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2256 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2257 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2258 # endif 2259 2260 d.vkDestroyBuffer( m_device, 2261 static_cast<VkBuffer>( buffer ), 2262 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2263 } 2264 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2265 2266 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2267 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2268 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2269 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2270 { 2271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2272 d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2273 } 2274 2275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2276 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2277 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 2278 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2280 { 2281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2282 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2283 VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); 2284 # endif 2285 2286 d.vkDestroyBuffer( m_device, 2287 static_cast<VkBuffer>( buffer ), 2288 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2289 } 2290 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2291 2292 template <typename Dispatch> createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferView * pView,Dispatch const & d) const2293 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, 2294 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2295 VULKAN_HPP_NAMESPACE::BufferView * pView, 2296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2297 { 2298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2299 return static_cast<Result>( d.vkCreateBufferView( m_device, 2300 reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), 2301 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2302 reinterpret_cast<VkBufferView *>( pView ) ) ); 2303 } 2304 2305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2306 template <typename Dispatch> 2307 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2308 Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2309 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2310 Dispatch const & d ) const 2311 { 2312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2313 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2314 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2315 # endif 2316 2317 VULKAN_HPP_NAMESPACE::BufferView view; 2318 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2319 d.vkCreateBufferView( m_device, 2320 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2321 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2322 reinterpret_cast<VkBufferView *>( &view ) ) ); 2323 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); 2324 2325 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); 2326 } 2327 2328 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2329 template <typename Dispatch> 2330 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2331 Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, 2332 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2333 Dispatch const & d ) const 2334 { 2335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2336 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2337 VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); 2338 # endif 2339 2340 VULKAN_HPP_NAMESPACE::BufferView view; 2341 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2342 
d.vkCreateBufferView( m_device, 2343 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2344 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2345 reinterpret_cast<VkBufferView *>( &view ) ) ); 2346 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); 2347 2348 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2349 result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2350 } 2351 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2352 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2353 2354 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2355 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2356 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2358 { 2359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2360 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2361 } 2362 2363 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2364 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2365 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2366 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2367 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2368 { 2369 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2370 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2371 VULKAN_HPP_ASSERT( d.vkDestroyBufferView 
&& "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2372 # endif 2373 2374 d.vkDestroyBufferView( m_device, 2375 static_cast<VkBufferView>( bufferView ), 2376 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2377 } 2378 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2379 2380 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2381 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2382 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2384 { 2385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2386 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2387 } 2388 2389 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2390 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2391 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2392 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2393 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2394 { 2395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2396 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2397 VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); 2398 # endif 2399 2400 d.vkDestroyBufferView( m_device, 2401 static_cast<VkBufferView>( bufferView ), 2402 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2403 } 2404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2405 2406 template <typename Dispatch> 
createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Image * pImage,Dispatch const & d) const2407 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, 2408 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2409 VULKAN_HPP_NAMESPACE::Image * pImage, 2410 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2411 { 2412 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2413 return static_cast<Result>( d.vkCreateImage( m_device, 2414 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), 2415 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2416 reinterpret_cast<VkImage *>( pImage ) ) ); 2417 } 2418 2419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2420 template <typename Dispatch> createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2421 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( 2422 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2423 { 2424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2425 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2426 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2427 # endif 2428 2429 VULKAN_HPP_NAMESPACE::Image image; 2430 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2431 d.vkCreateImage( m_device, 2432 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2433 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2434 reinterpret_cast<VkImage *>( &image ) ) ); 
2435 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); 2436 2437 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); 2438 } 2439 2440 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2441 template <typename Dispatch> createImageUnique(const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2442 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( 2443 const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 2444 { 2445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2447 VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); 2448 # endif 2449 2450 VULKAN_HPP_NAMESPACE::Image image; 2451 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2452 d.vkCreateImage( m_device, 2453 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2454 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2455 reinterpret_cast<VkImage *>( &image ) ) ); 2456 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); 2457 2458 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2459 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2460 } 2461 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2462 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2463 2464 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,Dispatch const & d) const2465 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2466 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2467 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2468 { 2469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2470 d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2471 } 2472 2473 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2474 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2475 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2476 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2477 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2478 { 2479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2480 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2481 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2482 # endif 2483 2484 d.vkDestroyImage( m_device, 2485 static_cast<VkImage>( image ), 2486 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2487 } 2488 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2489 2490 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2491 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2492 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2493 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2494 { 2495 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2496 d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2497 } 2498 2499 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2500 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2501 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2502 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2503 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2504 { 2505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2506 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2507 VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); 2508 # endif 2509 2510 d.vkDestroyImage( m_device, 2511 static_cast<VkImage>( image ), 2512 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2513 } 2514 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2515 2516 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,Dispatch const & d) const2517 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2518 const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, 2519 VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, 2520 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2521 { 2522 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2523 d.vkGetImageSubresourceLayout( m_device, 2524 static_cast<VkImage>( image ), 2525 reinterpret_cast<const VkImageSubresource *>( pSubresource ), 2526 reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); 2527 } 2528 2529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2530 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,Dispatch const & d) const2531 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( 2532 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2533 { 2534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2536 VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" ); 2537 # endif 2538 2539 VULKAN_HPP_NAMESPACE::SubresourceLayout layout; 2540 d.vkGetImageSubresourceLayout( m_device, 2541 static_cast<VkImage>( image ), 2542 reinterpret_cast<const VkImageSubresource *>( &subresource ), 2543 reinterpret_cast<VkSubresourceLayout *>( &layout ) ); 2544 2545 return layout; 2546 } 2547 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2548 2549 template <typename Dispatch> createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ImageView * pView,Dispatch const & d) const2550 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, 2551 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2552 VULKAN_HPP_NAMESPACE::ImageView * pView, 2553 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2554 { 2555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2556 return static_cast<Result>( d.vkCreateImageView( m_device, 2557 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), 2558 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2559 reinterpret_cast<VkImageView *>( pView ) ) ); 2560 } 2561 2562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2563 template <typename Dispatch> 2564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView(const 
VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2565 Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, 2566 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2567 Dispatch const & d ) const 2568 { 2569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2571 VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" ); 2572 # endif 2573 2574 VULKAN_HPP_NAMESPACE::ImageView view; 2575 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2576 d.vkCreateImageView( m_device, 2577 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2578 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2579 reinterpret_cast<VkImageView *>( &view ) ) ); 2580 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); 2581 2582 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); 2583 } 2584 2585 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2586 template <typename Dispatch> 2587 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2588 Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, 2589 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2590 Dispatch const & d ) const 2591 { 2592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2593 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2594 VULKAN_HPP_ASSERT( 
d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" ); 2595 # endif 2596 2597 VULKAN_HPP_NAMESPACE::ImageView view; 2598 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2599 d.vkCreateImageView( m_device, 2600 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2601 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2602 reinterpret_cast<VkImageView *>( &view ) ) ); 2603 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); 2604 2605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2606 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2607 } 2608 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2609 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2610 2611 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2612 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2613 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2614 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2615 { 2616 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2617 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2618 } 2619 2620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2621 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2622 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2623 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2624 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 2625 { 2626 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2627 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2628 VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" ); 2629 # endif 2630 2631 d.vkDestroyImageView( m_device, 2632 static_cast<VkImageView>( imageView ), 2633 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2634 } 2635 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2636 2637 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2638 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2639 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2641 { 2642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2643 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2644 } 2645 2646 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2647 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2648 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2649 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2650 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2651 { 2652 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2653 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2654 VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" ); 2655 # endif 2656 2657 d.vkDestroyImageView( m_device, 2658 static_cast<VkImageView>( imageView ), 2659 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2660 } 2661 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2662 2663 template <typename Dispatch> createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,Dispatch const & d) const2664 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 2665 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2666 VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, 2667 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2668 { 2669 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2670 return static_cast<Result>( d.vkCreateShaderModule( m_device, 2671 reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), 2672 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2673 reinterpret_cast<VkShaderModule *>( pShaderModule ) ) ); 2674 } 2675 2676 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2677 template <typename Dispatch> 2678 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2679 Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 2680 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2681 Dispatch const & d ) const 2682 { 2683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2684 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2685 VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" ); 2686 # endif 2687 2688 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2689 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2690 d.vkCreateShaderModule( m_device, 2691 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2692 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2693 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2694 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); 2695 2696 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); 2697 } 2698 2699 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2700 template <typename Dispatch> 2701 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2702 Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 2703 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2704 Dispatch const & d ) const 2705 { 2706 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2707 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2708 VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" ); 2709 # endif 2710 2711 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2712 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2713 d.vkCreateShaderModule( m_device, 2714 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2715 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2716 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2717 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); 2718 2719 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2720 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2721 } 2722 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2723 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2724 2725 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2726 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2727 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2729 { 2730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2731 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2732 } 2733 2734 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2735 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2736 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2737 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2738 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2739 { 2740 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2741 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2742 VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" ); 2743 # endif 2744 2745 d.vkDestroyShaderModule( m_device, 2746 static_cast<VkShaderModule>( shaderModule ), 2747 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( 
allocator ) ) ); 2748 } 2749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2750 2751 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2752 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2753 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2754 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2755 { 2756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2757 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2758 } 2759 2760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2761 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2762 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2763 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2764 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2765 { 2766 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2767 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2768 VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" ); 2769 # endif 2770 2771 d.vkDestroyShaderModule( m_device, 2772 static_cast<VkShaderModule>( shaderModule ), 2773 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2774 } 2775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2776 2777 template <typename Dispatch> createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,Dispatch const & d) const2778 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, 2779 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2780 VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, 2781 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2782 { 2783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2784 return static_cast<Result>( d.vkCreatePipelineCache( m_device, 2785 reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), 2786 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2787 reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) ); 2788 } 2789 2790 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2791 template <typename Dispatch> 2792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2793 Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, 2794 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2795 Dispatch const & d ) const 2796 { 2797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2798 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2799 VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" ); 2800 # endif 2801 2802 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2803 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2804 d.vkCreatePipelineCache( m_device, 2805 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2806 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2807 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2808 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); 2809 2810 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); 2811 } 2812 2813 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2814 template <typename Dispatch> 2815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2816 Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, 2817 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2818 Dispatch const & d ) const 2819 { 2820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2821 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2822 VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" ); 2823 # endif 2824 2825 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2826 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2827 d.vkCreatePipelineCache( m_device, 2828 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2829 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2830 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2831 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); 2832 2833 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 2834 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2835 } 2836 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2837 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2838 2839 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2840 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2841 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2843 { 2844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2845 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2846 } 2847 2848 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2849 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2850 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2851 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2852 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2853 { 2854 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2855 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2856 VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" ); 2857 # endif 2858 2859 d.vkDestroyPipelineCache( m_device, 2860 static_cast<VkPipelineCache>( pipelineCache ), 2861 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2862 } 2863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2864 2865 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2866 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache 
pipelineCache, 2867 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2869 { 2870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2871 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2872 } 2873 2874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2875 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2876 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2877 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2879 { 2880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2882 VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" ); 2883 # endif 2884 2885 d.vkDestroyPipelineCache( m_device, 2886 static_cast<VkPipelineCache>( pipelineCache ), 2887 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2888 } 2889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2890 2891 template <typename Dispatch> getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,size_t * pDataSize,void * pData,Dispatch const & d) const2892 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2893 size_t * pDataSize, 2894 void * pData, 2895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2896 { 2897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2898 return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) ); 2899 
} 2900 2901 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2902 template <typename Uint8_tAllocator, typename Dispatch> 2903 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Dispatch const & d) const2904 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const 2905 { 2906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2908 VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" ); 2909 # endif 2910 2911 std::vector<uint8_t, Uint8_tAllocator> data; 2912 size_t dataSize; 2913 VULKAN_HPP_NAMESPACE::Result result; 2914 do 2915 { 2916 result = 2917 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); 2918 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 2919 { 2920 data.resize( dataSize ); 2921 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2922 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 2923 } 2924 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 2925 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); 2926 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 2927 if ( dataSize < data.size() ) 2928 { 2929 data.resize( dataSize ); 2930 } 2931 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 2932 } 2933 2934 template <typename Uint8_tAllocator, 2935 typename Dispatch, 2936 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 2937 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const2938 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 2939 { 2940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2942 VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" ); 2943 # endif 2944 2945 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 2946 size_t dataSize; 2947 VULKAN_HPP_NAMESPACE::Result result; 2948 do 2949 { 2950 result = 2951 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); 2952 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 2953 { 2954 data.resize( dataSize ); 2955 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2956 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 2957 } 2958 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 2959 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); 2960 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 2961 if ( dataSize < data.size() ) 2962 { 2963 data.resize( dataSize ); 2964 } 2965 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 2966 } 2967 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2968 2969 template <typename Dispatch> mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache,uint32_t srcCacheCount,const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,Dispatch const & d) const2970 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, 2971 uint32_t srcCacheCount, 2972 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, 2973 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2974 { 2975 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2976 return static_cast<Result>( 2977 d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) ); 2978 } 2979 2980 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2981 template <typename Dispatch> 2982 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,Dispatch const & d) const2983 Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, 2984 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, 2985 Dispatch const & d ) const 2986 { 2987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2988 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 2989 VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" ); 2990 # endif 2991 2992 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergePipelineCaches( 2993 m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) ); 2994 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); 2995 2996 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 2997 } 2998 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2999 3000 template <typename Dispatch> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const 
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const3001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3002 uint32_t createInfoCount, 3003 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, 3004 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3005 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 3006 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3007 { 3008 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3009 return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, 3010 static_cast<VkPipelineCache>( pipelineCache ), 3011 createInfoCount, 3012 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), 3013 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3014 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 3015 } 3016 3017 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3018 template <typename PipelineAllocator, typename Dispatch> 3019 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3020 Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3021 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3022 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3023 Dispatch const & d ) const 3024 { 3025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3026 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3027 
VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3028 # endif 3029 3030 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 3031 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3032 m_device, 3033 static_cast<VkPipelineCache>( pipelineCache ), 3034 createInfos.size(), 3035 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3036 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3037 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3038 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3039 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 3040 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3041 3042 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3043 } 3044 3045 template <typename PipelineAllocator, 3046 typename Dispatch, 3047 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 3048 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3049 Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3050 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3051 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator, 3052 PipelineAllocator & pipelineAllocator, 3053 Dispatch const & d ) const 3054 { 3055 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3056 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3057 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3058 # endif 3059 3060 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 3061 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3062 m_device, 3063 static_cast<VkPipelineCache>( pipelineCache ), 3064 createInfos.size(), 3065 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3066 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3067 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3068 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3069 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 3070 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3071 3072 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3073 } 3074 3075 template <typename Dispatch> 3076 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createGraphicsPipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3077 Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3078 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 3079 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3080 Dispatch const & d ) const 3081 { 3082 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3083 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3084 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3085 # endif 3086 3087 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3088 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3089 m_device, 3090 static_cast<VkPipelineCache>( pipelineCache ), 3091 1, 3092 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 3093 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3094 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3095 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3096 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", 3097 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3098 3099 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 3100 } 3101 3102 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3103 template <typename Dispatch, typename PipelineAllocator> 3104 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3105 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3106 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3107 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3108 Dispatch const & d ) const 3109 { 3110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 3111 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3112 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3113 # endif 3114 3115 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3116 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3117 m_device, 3118 static_cast<VkPipelineCache>( pipelineCache ), 3119 createInfos.size(), 3120 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3121 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3122 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3123 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3124 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 3125 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3126 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 3127 uniquePipelines.reserve( createInfos.size() ); 3128 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3129 for ( auto const & pipeline : pipelines ) 3130 { 3131 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3132 } 3133 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3134 } 3135 3136 template < 3137 typename Dispatch, 3138 typename PipelineAllocator, 3139 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3140 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> 
createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3141 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3142 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 3143 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3144 PipelineAllocator & pipelineAllocator, 3145 Dispatch const & d ) const 3146 { 3147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3148 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3149 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3150 # endif 3151 3152 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3153 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3154 m_device, 3155 static_cast<VkPipelineCache>( pipelineCache ), 3156 createInfos.size(), 3157 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 3158 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3159 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3160 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3161 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 3162 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3163 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 3164 uniquePipelines.reserve( createInfos.size() ); 3165 ObjectDestroy<Device, Dispatch> deleter( *this, 
allocator, d ); 3166 for ( auto const & pipeline : pipelines ) 3167 { 3168 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3169 } 3170 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3171 } 3172 3173 template <typename Dispatch> 3174 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3175 Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3176 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 3177 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3178 Dispatch const & d ) const 3179 { 3180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3182 VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); 3183 # endif 3184 3185 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3186 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 3187 m_device, 3188 static_cast<VkPipelineCache>( pipelineCache ), 3189 1, 3190 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 3191 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3192 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3193 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3194 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", 3195 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT 
} ); 3196 3197 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3198 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3199 } 3200 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3201 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3202 3203 template <typename Dispatch> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const3204 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3205 uint32_t createInfoCount, 3206 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, 3207 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3208 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 3209 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3210 { 3211 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3212 return static_cast<Result>( d.vkCreateComputePipelines( m_device, 3213 static_cast<VkPipelineCache>( pipelineCache ), 3214 createInfoCount, 3215 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), 3216 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3217 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 3218 } 3219 3220 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3221 template <typename PipelineAllocator, typename Dispatch> 3222 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch 
const & d) const3223 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3224 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3225 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3226 Dispatch const & d ) const 3227 { 3228 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3229 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3230 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3231 # endif 3232 3233 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 3234 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3235 m_device, 3236 static_cast<VkPipelineCache>( pipelineCache ), 3237 createInfos.size(), 3238 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3239 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3240 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3241 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3242 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 3243 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3244 3245 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3246 } 3247 3248 template <typename PipelineAllocator, 3249 typename Dispatch, 3250 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 3251 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3252 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3253 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3254 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3255 PipelineAllocator & pipelineAllocator, 3256 Dispatch const & d ) const 3257 { 3258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3259 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3260 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3261 # endif 3262 3263 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 3264 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3265 m_device, 3266 static_cast<VkPipelineCache>( pipelineCache ), 3267 createInfos.size(), 3268 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3269 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3270 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3271 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3272 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 3273 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3274 3275 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 3276 } 3277 3278 template <typename Dispatch> 3279 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const 
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3280 Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3281 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 3282 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3283 Dispatch const & d ) const 3284 { 3285 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3286 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3287 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3288 # endif 3289 3290 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3291 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3292 m_device, 3293 static_cast<VkPipelineCache>( pipelineCache ), 3294 1, 3295 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3296 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3297 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3298 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3299 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", 3300 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3301 3302 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 3303 } 3304 3305 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3306 template <typename Dispatch, typename PipelineAllocator> 3307 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3308 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3309 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3310 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3311 Dispatch const & d ) const 3312 { 3313 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3314 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3315 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3316 # endif 3317 3318 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3319 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3320 m_device, 3321 static_cast<VkPipelineCache>( pipelineCache ), 3322 createInfos.size(), 3323 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3324 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3325 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3326 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3327 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 3328 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3329 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 3330 uniquePipelines.reserve( createInfos.size() ); 3331 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3332 for ( auto const & pipeline : pipelines ) 3333 { 3334 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3335 } 3336 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) 
); 3337 } 3338 3339 template < 3340 typename Dispatch, 3341 typename PipelineAllocator, 3342 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3343 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3344 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3345 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3346 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3347 PipelineAllocator & pipelineAllocator, 3348 Dispatch const & d ) const 3349 { 3350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3351 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3352 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3353 # endif 3354 3355 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3356 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3357 m_device, 3358 static_cast<VkPipelineCache>( pipelineCache ), 3359 createInfos.size(), 3360 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3361 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3362 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3363 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3364 VULKAN_HPP_NAMESPACE_STRING 
"::Device::createComputePipelinesUnique", 3365 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3366 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 3367 uniquePipelines.reserve( createInfos.size() ); 3368 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3369 for ( auto const & pipeline : pipelines ) 3370 { 3371 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3372 } 3373 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 3374 } 3375 3376 template <typename Dispatch> 3377 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3378 Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3379 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 3380 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3381 Dispatch const & d ) const 3382 { 3383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3384 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3385 VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); 3386 # endif 3387 3388 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3389 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3390 m_device, 3391 static_cast<VkPipelineCache>( pipelineCache ), 3392 1, 3393 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3394 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3395 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3396 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 3397 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", 3398 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3399 3400 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3401 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3402 } 3403 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3405 3406 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3407 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3408 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3409 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3410 { 3411 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3412 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3413 } 3414 3415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3416 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3417 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3418 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3419 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3420 { 3421 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3422 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3423 VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function 
<vkDestroyPipeline> requires <VK_VERSION_1_0>" ); 3424 # endif 3425 3426 d.vkDestroyPipeline( m_device, 3427 static_cast<VkPipeline>( pipeline ), 3428 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3429 } 3430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3431 3432 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3433 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3434 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3435 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3436 { 3437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3438 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3439 } 3440 3441 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3442 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3443 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3444 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3445 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3446 { 3447 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3448 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3449 VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" ); 3450 # endif 3451 3452 d.vkDestroyPipeline( m_device, 3453 static_cast<VkPipeline>( pipeline ), 3454 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3455 } 3456 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3457 3458 template <typename Dispatch> createPipelineLayout(const 
VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,Dispatch const & d) const3459 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, 3460 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3461 VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, 3462 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3463 { 3464 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3465 return static_cast<Result>( d.vkCreatePipelineLayout( m_device, 3466 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), 3467 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3468 reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); 3469 } 3470 3471 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3472 template <typename Dispatch> 3473 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3474 Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3475 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3476 Dispatch const & d ) const 3477 { 3478 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3479 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3480 VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); 3481 # endif 3482 3483 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3484 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3485 d.vkCreatePipelineLayout( m_device, 3486 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( 
&createInfo ), 3487 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3488 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3489 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); 3490 3491 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); 3492 } 3493 3494 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3495 template <typename Dispatch> 3496 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3497 Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3498 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3499 Dispatch const & d ) const 3500 { 3501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3502 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3503 VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); 3504 # endif 3505 3506 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3507 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3508 d.vkCreatePipelineLayout( m_device, 3509 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3510 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3511 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3512 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); 3513 3514 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3515 
result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3516 } 3517 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3518 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3519 3520 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3521 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3522 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3523 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3524 { 3525 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3526 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3527 } 3528 3529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3530 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3531 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3532 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3533 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3534 { 3535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3536 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3537 VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); 3538 # endif 3539 3540 d.vkDestroyPipelineLayout( m_device, 3541 static_cast<VkPipelineLayout>( pipelineLayout ), 3542 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3543 } 3544 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3545 3546 template <typename 
Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3547 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3548 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3549 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3550 { 3551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3552 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3553 } 3554 3555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3556 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3557 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3558 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3559 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3560 { 3561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3562 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3563 VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); 3564 # endif 3565 3566 d.vkDestroyPipelineLayout( m_device, 3567 static_cast<VkPipelineLayout>( pipelineLayout ), 3568 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3569 } 3570 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3571 3572 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Sampler * pSampler,Dispatch const & d) const3573 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * 
pCreateInfo, 3574 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3575 VULKAN_HPP_NAMESPACE::Sampler * pSampler, 3576 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3577 { 3578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3579 return static_cast<Result>( d.vkCreateSampler( m_device, 3580 reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), 3581 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3582 reinterpret_cast<VkSampler *>( pSampler ) ) ); 3583 } 3584 3585 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3586 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3587 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( 3588 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3589 { 3590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3591 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3592 VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); 3593 # endif 3594 3595 VULKAN_HPP_NAMESPACE::Sampler sampler; 3596 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3597 d.vkCreateSampler( m_device, 3598 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3599 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3600 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3601 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); 3602 3603 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); 3604 } 3605 3606 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3607 
template <typename Dispatch> createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3608 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( 3609 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3610 { 3611 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3612 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3613 VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); 3614 # endif 3615 3616 VULKAN_HPP_NAMESPACE::Sampler sampler; 3617 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3618 d.vkCreateSampler( m_device, 3619 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3620 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3621 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3622 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); 3623 3624 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3625 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3626 } 3627 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3628 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3629 3630 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3631 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3632 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3633 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 3634 { 3635 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3636 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3637 } 3638 3639 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3640 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3641 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3642 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3643 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3644 { 3645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3646 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3647 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3648 # endif 3649 3650 d.vkDestroySampler( m_device, 3651 static_cast<VkSampler>( sampler ), 3652 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3653 } 3654 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3655 3656 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3657 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3658 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3659 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3660 { 3661 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3662 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3663 } 3664 3665 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3666 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator,Dispatch const & d) const3667 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3668 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3669 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3670 { 3671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3672 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3673 VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); 3674 # endif 3675 3676 d.vkDestroySampler( m_device, 3677 static_cast<VkSampler>( sampler ), 3678 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3679 } 3680 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3681 3682 template <typename Dispatch> createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,Dispatch const & d) const3683 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 3684 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3685 VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, 3686 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3687 { 3688 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3689 return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, 3690 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 3691 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3692 reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); 3693 } 3694 3695 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3696 template <typename Dispatch> 3697 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout(const 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3698 Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3699 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3700 Dispatch const & d ) const 3701 { 3702 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3703 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3704 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3705 # endif 3706 3707 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3708 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3709 m_device, 3710 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3711 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3712 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3713 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); 3714 3715 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); 3716 } 3717 3718 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3719 template <typename Dispatch> 3720 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3721 Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3722 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3723 Dispatch const & d ) const 
3724 { 3725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3726 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3727 VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3728 # endif 3729 3730 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3731 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3732 m_device, 3733 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3734 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3735 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3736 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); 3737 3738 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3739 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3740 } 3741 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3742 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3743 3744 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3745 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3746 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3747 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3748 { 3749 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3750 d.vkDestroyDescriptorSetLayout( 3751 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3752 } 3753 3754 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3755 template <typename 
Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3756 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3757 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3759 { 3760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3761 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3762 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3763 # endif 3764 3765 d.vkDestroyDescriptorSetLayout( 3766 m_device, 3767 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3768 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3769 } 3770 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3771 3772 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3773 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3774 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3775 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3776 { 3777 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3778 d.vkDestroyDescriptorSetLayout( 3779 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3780 } 3781 3782 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3783 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3784 VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3785 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3786 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3787 { 3788 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3789 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3790 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); 3791 # endif 3792 3793 d.vkDestroyDescriptorSetLayout( 3794 m_device, 3795 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3796 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3797 } 3798 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3799 3800 template <typename Dispatch> createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,Dispatch const & d) const3801 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, 3802 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3803 VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, 3804 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3805 { 3806 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3807 return static_cast<Result>( d.vkCreateDescriptorPool( m_device, 3808 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), 3809 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3810 reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); 3811 } 3812 3813 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3814 template <typename Dispatch> 3815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool(const 
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3816 Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3817 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3818 Dispatch const & d ) const 3819 { 3820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3821 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3822 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); 3823 # endif 3824 3825 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3826 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3827 d.vkCreateDescriptorPool( m_device, 3828 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3829 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3830 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3831 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); 3832 3833 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); 3834 } 3835 3836 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3837 template <typename Dispatch> 3838 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3839 Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3840 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3841 Dispatch const & d ) const 3842 { 3843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 3844 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3845 VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); 3846 # endif 3847 3848 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3849 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3850 d.vkCreateDescriptorPool( m_device, 3851 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3852 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3853 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3854 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); 3855 3856 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 3857 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3858 } 3859 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3861 3862 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3863 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3864 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3865 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3866 { 3867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3868 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3869 } 3870 3871 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3872 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3873 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3874 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3876 { 3877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3879 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3880 # endif 3881 3882 d.vkDestroyDescriptorPool( m_device, 3883 static_cast<VkDescriptorPool>( descriptorPool ), 3884 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3885 } 3886 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3887 3888 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3889 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3890 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3891 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3892 { 3893 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3894 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3895 } 3896 3897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3898 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3899 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3900 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3901 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3902 { 3903 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3904 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3905 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); 3906 # endif 3907 3908 d.vkDestroyDescriptorPool( m_device, 3909 static_cast<VkDescriptorPool>( descriptorPool ), 3910 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3911 } 3912 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3913 3914 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3915 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3916 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3917 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3918 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3919 { 3920 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3921 return static_cast<Result>( 3922 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3923 } 3924 #else 3925 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3926 VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3927 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3929 { 3930 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3931 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3932 VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" ); 3933 # endif 3934 3935 d.vkResetDescriptorPool( m_device, 
static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ); 3936 } 3937 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3938 3939 template <typename Dispatch> allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3940 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, 3941 VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3942 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3943 { 3944 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3945 return static_cast<Result>( d.vkAllocateDescriptorSets( 3946 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); 3947 } 3948 3949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3950 template <typename DescriptorSetAllocator, typename Dispatch> 3951 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3952 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3953 { 3954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3955 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3956 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 3957 # endif 3958 3959 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); 3960 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3961 m_device, reinterpret_cast<const 
VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3962 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3963 3964 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); 3965 } 3966 3967 template <typename DescriptorSetAllocator, 3968 typename Dispatch, 3969 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type> 3970 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3971 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 3972 DescriptorSetAllocator & descriptorSetAllocator, 3973 Dispatch const & d ) const 3974 { 3975 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3976 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3977 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 3978 # endif 3979 3980 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); 3981 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3982 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3983 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3984 3985 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) 
); 3986 } 3987 3988 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3989 template <typename Dispatch, typename DescriptorSetAllocator> 3990 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 3991 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3992 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3993 { 3994 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3995 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 3996 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 3997 # endif 3998 3999 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4000 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4001 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4002 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4003 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; 4004 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 4005 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4006 for ( auto const & descriptorSet : descriptorSets ) 4007 { 4008 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 4009 } 4010 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4011 } 4012 4013 template < 4014 typename Dispatch, 4015 typename 
DescriptorSetAllocator, 4016 typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, 4017 int>::type> 4018 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4019 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const4020 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 4021 DescriptorSetAllocator & descriptorSetAllocator, 4022 Dispatch const & d ) const 4023 { 4024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4025 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4026 VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); 4027 # endif 4028 4029 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 4030 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 4031 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 4032 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 4033 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); 4034 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 4035 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 4036 for ( auto const & descriptorSet : descriptorSets ) 4037 { 4038 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) 
); 4039 } 4040 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); 4041 } 4042 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4043 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4044 4045 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const4046 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4047 uint32_t descriptorSetCount, 4048 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4049 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4050 { 4051 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4052 return static_cast<Result>( d.vkFreeDescriptorSets( 4053 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4054 } 4055 4056 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4057 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,Dispatch const & d) const4058 VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4059 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4060 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4061 { 4062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4063 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4064 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4065 # endif 4066 4067 d.vkFreeDescriptorSets( 4068 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 
4069 } 4070 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4071 4072 template <typename Dispatch> Result(Device::free)4073 VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4074 uint32_t descriptorSetCount, 4075 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4076 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4077 { 4078 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4079 return static_cast<Result>( d.vkFreeDescriptorSets( 4080 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 4081 } 4082 4083 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4084 template <typename Dispatch> 4085 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 4086 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4088 { 4089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4090 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4091 VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); 4092 # endif 4093 4094 d.vkFreeDescriptorSets( 4095 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 4096 } 4097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4098 4099 template <typename Dispatch> updateDescriptorSets(uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,Dispatch const & d) const4100 VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, 4101 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 4102 uint32_t descriptorCopyCount, 4103 const 
VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, 4104 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4105 { 4106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4107 d.vkUpdateDescriptorSets( m_device, 4108 descriptorWriteCount, 4109 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), 4110 descriptorCopyCount, 4111 reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) ); 4112 } 4113 4114 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4115 template <typename Dispatch> 4116 VULKAN_HPP_INLINE void updateDescriptorSets(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,Dispatch const & d) const4117 Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 4118 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, 4119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4120 { 4121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4122 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4123 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" ); 4124 # endif 4125 4126 d.vkUpdateDescriptorSets( m_device, 4127 descriptorWrites.size(), 4128 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), 4129 descriptorCopies.size(), 4130 reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) ); 4131 } 4132 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4133 4134 template <typename Dispatch> createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,Dispatch const & d) const4135 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, 4136 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4137 VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, 4138 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4139 { 4140 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4141 return static_cast<Result>( d.vkCreateFramebuffer( m_device, 4142 reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), 4143 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4144 reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); 4145 } 4146 4147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4148 template <typename Dispatch> 4149 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4150 Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4151 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4152 Dispatch const & d ) const 4153 { 4154 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4155 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4156 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4157 # endif 4158 4159 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4160 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4161 d.vkCreateFramebuffer( m_device, 4162 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4163 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4164 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4165 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createFramebuffer" ); 4166 4167 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); 4168 } 4169 4170 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4171 template <typename Dispatch> 4172 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4173 Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, 4174 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4175 Dispatch const & d ) const 4176 { 4177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4178 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4179 VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); 4180 # endif 4181 4182 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 4183 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4184 d.vkCreateFramebuffer( m_device, 4185 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 4186 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4187 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 4188 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); 4189 4190 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4191 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4192 } 4193 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4194 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4195 4196 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer 
framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4197 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4198 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4199 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4200 { 4201 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4202 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4203 } 4204 4205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4206 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4207 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4208 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4209 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4210 { 4211 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4212 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4213 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4214 # endif 4215 4216 d.vkDestroyFramebuffer( m_device, 4217 static_cast<VkFramebuffer>( framebuffer ), 4218 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4219 } 4220 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4221 4222 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4223 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4224 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4225 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4226 { 4227 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4228 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4229 } 4230 4231 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4232 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4233 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 4234 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4235 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4236 { 4237 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4238 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4239 VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); 4240 # endif 4241 4242 d.vkDestroyFramebuffer( m_device, 4243 static_cast<VkFramebuffer>( framebuffer ), 4244 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4245 } 4246 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4247 4248 template <typename Dispatch> createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const4249 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, 4250 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4251 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 4252 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4253 { 4254 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4255 return static_cast<Result>( d.vkCreateRenderPass( m_device, 4256 reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), 4257 reinterpret_cast<const 
VkAllocationCallbacks *>( pAllocator ), 4258 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 4259 } 4260 4261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4262 template <typename Dispatch> 4263 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4264 Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4265 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4266 Dispatch const & d ) const 4267 { 4268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4270 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4271 # endif 4272 4273 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4274 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4275 d.vkCreateRenderPass( m_device, 4276 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4277 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4278 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4279 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); 4280 4281 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 4282 } 4283 4284 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4285 template <typename Dispatch> 4286 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4287 
Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, 4288 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4289 Dispatch const & d ) const 4290 { 4291 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4292 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4293 VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); 4294 # endif 4295 4296 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 4297 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4298 d.vkCreateRenderPass( m_device, 4299 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 4300 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4301 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 4302 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); 4303 4304 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4305 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4306 } 4307 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4308 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4309 4310 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4311 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4312 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4314 { 4315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4316 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4317 } 4318 4319 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 4320 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4321 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4322 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4323 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4324 { 4325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4326 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4327 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4328 # endif 4329 4330 d.vkDestroyRenderPass( m_device, 4331 static_cast<VkRenderPass>( renderPass ), 4332 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4333 } 4334 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4335 4336 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4337 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4338 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4340 { 4341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4342 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4343 } 4344 4345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4346 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4347 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4348 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4349 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4350 { 4351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4352 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4353 VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); 4354 # endif 4355 4356 d.vkDestroyRenderPass( m_device, 4357 static_cast<VkRenderPass>( renderPass ), 4358 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4359 } 4360 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4361 4362 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const4363 VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4364 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 4365 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4366 { 4367 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4368 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 4369 } 4370 4371 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4372 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Dispatch const & d) const4373 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 4374 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4375 { 4376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4378 VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" ); 4379 # endif 4380 4381 VULKAN_HPP_NAMESPACE::Extent2D granularity; 4382 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), 
reinterpret_cast<VkExtent2D *>( &granularity ) ); 4383 4384 return granularity; 4385 } 4386 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4387 4388 template <typename Dispatch> createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,Dispatch const & d) const4389 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, 4390 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4391 VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, 4392 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4393 { 4394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4395 return static_cast<Result>( d.vkCreateCommandPool( m_device, 4396 reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), 4397 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 4398 reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); 4399 } 4400 4401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4402 template <typename Dispatch> 4403 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4404 Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, 4405 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4406 Dispatch const & d ) const 4407 { 4408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4409 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4410 VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); 4411 # endif 4412 4413 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 4414 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 4415 d.vkCreateCommandPool( m_device, 4416 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 4417 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4418 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 4419 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); 4420 4421 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); 4422 } 4423 4424 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4425 template <typename Dispatch> 4426 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4427 Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, 4428 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4429 Dispatch const & d ) const 4430 { 4431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4433 VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); 4434 # endif 4435 4436 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 4437 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4438 d.vkCreateCommandPool( m_device, 4439 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 4440 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 4441 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 4442 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" 
); 4443 4444 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 4445 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 4446 } 4447 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4448 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4449 4450 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4451 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4452 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4453 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4454 { 4455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4456 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4457 } 4458 4459 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4460 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4461 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4462 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4463 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4464 { 4465 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4466 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4467 VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); 4468 # endif 4469 4470 d.vkDestroyCommandPool( m_device, 4471 static_cast<VkCommandPool>( commandPool ), 4472 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4473 } 4474 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4475 4476 template 
<typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const4477 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4478 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 4479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4480 { 4481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4482 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 4483 } 4484 4485 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4486 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const4487 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4488 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 4489 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4490 { 4491 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4492 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4493 VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); 4494 # endif 4495 4496 d.vkDestroyCommandPool( m_device, 4497 static_cast<VkCommandPool>( commandPool ), 4498 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 4499 } 4500 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4501 4502 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4503 template <typename Dispatch> resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4504 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4505 VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, 
4506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4507 { 4508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4509 return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4510 } 4511 #else 4512 template <typename Dispatch> 4513 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4514 Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const 4515 { 4516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4517 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4518 VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" ); 4519 # endif 4520 4521 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4522 d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4523 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); 4524 4525 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 4526 } 4527 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4528 4529 template <typename Dispatch> allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4530 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, 4531 VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4532 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4533 { 4534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4535 return 
static_cast<Result>( d.vkAllocateCommandBuffers( 4536 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); 4537 } 4538 4539 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4540 template <typename CommandBufferAllocator, typename Dispatch> 4541 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4542 Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4543 { 4544 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4545 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4546 VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); 4547 # endif 4548 4549 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); 4550 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4551 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4552 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4553 4554 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); 4555 } 4556 4557 template <typename CommandBufferAllocator, 4558 typename Dispatch, 4559 typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type> 4560 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type 
allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4561 Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, 4562 CommandBufferAllocator & commandBufferAllocator, 4563 Dispatch const & d ) const 4564 { 4565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4566 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4567 VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); 4568 # endif 4569 4570 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); 4571 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4572 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4573 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4574 4575 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); 4576 } 4577 4578 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4579 template <typename Dispatch, typename CommandBufferAllocator> 4580 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4581 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4582 Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4583 { 4584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4585 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4586 VULKAN_HPP_ASSERT( 
d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); 4587 # endif 4588 4589 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4590 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4591 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4592 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4593 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers; 4594 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4595 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4596 for ( auto const & commandBuffer : commandBuffers ) 4597 { 4598 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); 4599 } 4600 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); 4601 } 4602 4603 template < 4604 typename Dispatch, 4605 typename CommandBufferAllocator, 4606 typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value, 4607 int>::type> 4608 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4609 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4610 Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, 4611 CommandBufferAllocator & commandBufferAllocator, 4612 Dispatch const & d ) const 
4613 { 4614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4615 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4616 VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); 4617 # endif 4618 4619 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4620 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4621 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4622 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4623 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); 4624 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4625 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4626 for ( auto const & commandBuffer : commandBuffers ) 4627 { 4628 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); 4629 } 4630 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); 4631 } 4632 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4634 4635 template <typename Dispatch> freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool,uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4636 VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4637 uint32_t commandBufferCount, 4638 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4639 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4640 { 4641 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 4642 d.vkFreeCommandBuffers( 4643 m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 4644 } 4645 4646 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4647 template <typename Dispatch> freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const4648 VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4649 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 4650 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4651 { 4652 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4653 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4654 VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" ); 4655 # endif 4656 4657 d.vkFreeCommandBuffers( 4658 m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 4659 } 4660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4661 4662 template <typename Dispatch> 4663 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4664 uint32_t commandBufferCount, 4665 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4666 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4667 { 4668 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4669 d.vkFreeCommandBuffers( 4670 m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 4671 } 4672 4673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4674 template <typename Dispatch> 4675 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4676 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 4677 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4678 { 4679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4680 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4681 VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" ); 4682 # endif 4683 4684 d.vkFreeCommandBuffers( 4685 m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 4686 } 4687 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4688 4689 template <typename Dispatch> begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,Dispatch const & d) const4690 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, 4691 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4692 { 4693 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4694 return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); 4695 } 4696 4697 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4698 template <typename Dispatch> 4699 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo,Dispatch const & d) const4700 CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const 4701 { 4702 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4703 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4704 VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" ); 4705 # endif 4706 4707 VULKAN_HPP_NAMESPACE::Result result = 4708 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const 
VkCommandBufferBeginInfo *>( &beginInfo ) ) ); 4709 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); 4710 4711 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 4712 } 4713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4714 4715 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4716 template <typename Dispatch> end(Dispatch const & d) const4717 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4718 { 4719 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4720 return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); 4721 } 4722 #else 4723 template <typename Dispatch> end(Dispatch const & d) const4724 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const 4725 { 4726 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4727 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4728 VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" ); 4729 # endif 4730 4731 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); 4732 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); 4733 4734 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 4735 } 4736 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4737 4738 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4739 template <typename Dispatch> reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,Dispatch const & d) const4740 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, 4741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4742 { 4743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4744 return static_cast<Result>( 
d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); 4745 } 4746 #else 4747 template <typename Dispatch> reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,Dispatch const & d) const4748 VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const 4749 { 4750 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4751 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4752 VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" ); 4753 # endif 4754 4755 VULKAN_HPP_NAMESPACE::Result result = 4756 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); 4757 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); 4758 4759 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 4760 } 4761 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4762 4763 template <typename Dispatch> bindPipeline(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const4764 VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4765 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 4766 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4767 { 4768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4769 d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 4770 } 4771 4772 template <typename Dispatch> setViewport(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const4773 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, 4774 uint32_t viewportCount, 4775 const 
VULKAN_HPP_NAMESPACE::Viewport * pViewports, 4776 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4777 { 4778 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4779 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 4780 } 4781 4782 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4783 template <typename Dispatch> setViewport(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const4784 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, 4785 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 4786 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4787 { 4788 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4789 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4790 VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" ); 4791 # endif 4792 4793 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 4794 } 4795 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4796 4797 template <typename Dispatch> setScissor(uint32_t firstScissor,uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const4798 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, 4799 uint32_t scissorCount, 4800 const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, 4801 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4802 { 4803 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4804 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 4805 } 4806 4807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4808 template <typename Dispatch> setScissor(uint32_t firstScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & 
scissors,Dispatch const & d) const4809 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, 4810 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 4811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4812 { 4813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4815 VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" ); 4816 # endif 4817 4818 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 4819 } 4820 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4821 4822 template <typename Dispatch> setLineWidth(float lineWidth,Dispatch const & d) const4823 VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4824 { 4825 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4826 d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); 4827 } 4828 4829 template <typename Dispatch> 4830 VULKAN_HPP_INLINE void setDepthBias(float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor,Dispatch const & d) const4831 CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4832 { 4833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4834 d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); 4835 } 4836 4837 template <typename Dispatch> setBlendConstants(const float blendConstants[4],Dispatch const & d) const4838 VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4839 { 4840 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4841 d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); 4842 } 4843 
4844 template <typename Dispatch> setDepthBounds(float minDepthBounds,float maxDepthBounds,Dispatch const & d) const4845 VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4846 { 4847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4848 d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); 4849 } 4850 4851 template <typename Dispatch> 4852 VULKAN_HPP_INLINE void setStencilCompareMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t compareMask,Dispatch const & d) const4853 CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4854 { 4855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4856 d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); 4857 } 4858 4859 template <typename Dispatch> 4860 VULKAN_HPP_INLINE void setStencilWriteMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t writeMask,Dispatch const & d) const4861 CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4862 { 4863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4864 d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); 4865 } 4866 4867 template <typename Dispatch> 4868 VULKAN_HPP_INLINE void setStencilReference(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t reference,Dispatch const & d) const4869 CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4870 { 4871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4872 d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( 
faceMask ), reference ); 4873 } 4874 4875 template <typename Dispatch> bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets,Dispatch const & d) const4876 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4877 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 4878 uint32_t firstSet, 4879 uint32_t descriptorSetCount, 4880 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4881 uint32_t dynamicOffsetCount, 4882 const uint32_t * pDynamicOffsets, 4883 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4884 { 4885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4886 d.vkCmdBindDescriptorSets( m_commandBuffer, 4887 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 4888 static_cast<VkPipelineLayout>( layout ), 4889 firstSet, 4890 descriptorSetCount, 4891 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), 4892 dynamicOffsetCount, 4893 pDynamicOffsets ); 4894 } 4895 4896 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4897 template <typename Dispatch> bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,Dispatch const & d) const4898 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4899 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 4900 uint32_t firstSet, 4901 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4902 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, 
4903 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4904 { 4905 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4906 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4907 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" ); 4908 # endif 4909 4910 d.vkCmdBindDescriptorSets( m_commandBuffer, 4911 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 4912 static_cast<VkPipelineLayout>( layout ), 4913 firstSet, 4914 descriptorSets.size(), 4915 reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), 4916 dynamicOffsets.size(), 4917 dynamicOffsets.data() ); 4918 } 4919 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4920 4921 template <typename Dispatch> bindIndexBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const4922 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 4923 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4924 VULKAN_HPP_NAMESPACE::IndexType indexType, 4925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4926 { 4927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4928 d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) ); 4929 } 4930 4931 template <typename Dispatch> bindVertexBuffers(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const4932 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, 4933 uint32_t bindingCount, 4934 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 4935 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 4936 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4937 { 4938 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4939 d.vkCmdBindVertexBuffers( 4940 
m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 4941 } 4942 4943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4944 template <typename Dispatch> bindVertexBuffers(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const4945 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, 4946 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 4947 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 4948 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 4949 { 4950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4951 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 4952 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" ); 4953 # endif 4954 # ifdef VULKAN_HPP_NO_EXCEPTIONS 4955 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 4956 # else 4957 if ( buffers.size() != offsets.size() ) 4958 { 4959 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); 4960 } 4961 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 4962 4963 d.vkCmdBindVertexBuffers( m_commandBuffer, 4964 firstBinding, 4965 buffers.size(), 4966 reinterpret_cast<const VkBuffer *>( buffers.data() ), 4967 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 4968 } 4969 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4970 4971 template <typename Dispatch> draw(uint32_t vertexCount,uint32_t instanceCount,uint32_t firstVertex,uint32_t firstInstance,Dispatch const & d) const4972 VULKAN_HPP_INLINE void CommandBuffer::draw( 4973 uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4974 { 4975 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4976 d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); 4977 } 4978 4979 template <typename Dispatch> drawIndexed(uint32_t indexCount,uint32_t instanceCount,uint32_t firstIndex,int32_t vertexOffset,uint32_t firstInstance,Dispatch const & d) const4980 VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, 4981 uint32_t instanceCount, 4982 uint32_t firstIndex, 4983 int32_t vertexOffset, 4984 uint32_t firstInstance, 4985 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4986 { 4987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4988 d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); 4989 } 4990 4991 template <typename Dispatch> drawIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const4992 VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 4993 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4994 uint32_t drawCount, 4995 uint32_t stride, 4996 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4997 { 4998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4999 d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 5000 } 5001 5002 template <typename Dispatch> drawIndexedIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const5003 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 5004 VULKAN_HPP_NAMESPACE::DeviceSize offset, 5005 uint32_t drawCount, 5006 uint32_t stride, 5007 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5008 { 5009 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 5010 d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 5011 } 5012 5013 template <typename Dispatch> 5014 VULKAN_HPP_INLINE void dispatch(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const5015 CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5016 { 5017 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5018 d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 5019 } 5020 5021 template <typename Dispatch> dispatchIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const5022 VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 5023 VULKAN_HPP_NAMESPACE::DeviceSize offset, 5024 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5025 { 5026 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5027 d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 5028 } 5029 5030 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,Dispatch const & d) const5031 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5032 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5033 uint32_t regionCount, 5034 const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, 5035 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5036 { 5037 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5038 d.vkCmdCopyBuffer( m_commandBuffer, 5039 static_cast<VkBuffer>( srcBuffer ), 5040 static_cast<VkBuffer>( dstBuffer ), 5041 regionCount, 5042 reinterpret_cast<const VkBufferCopy *>( pRegions ) ); 5043 } 5044 5045 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 5046 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,Dispatch const & d) const5047 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5048 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5049 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, 5050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5051 { 5052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5053 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5054 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" ); 5055 # endif 5056 5057 d.vkCmdCopyBuffer( m_commandBuffer, 5058 static_cast<VkBuffer>( srcBuffer ), 5059 static_cast<VkBuffer>( dstBuffer ), 5060 regions.size(), 5061 reinterpret_cast<const VkBufferCopy *>( regions.data() ) ); 5062 } 5063 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5064 5065 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,Dispatch const & d) const5066 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5067 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5068 VULKAN_HPP_NAMESPACE::Image dstImage, 5069 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5070 uint32_t regionCount, 5071 const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, 5072 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5073 { 5074 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5075 d.vkCmdCopyImage( m_commandBuffer, 5076 static_cast<VkImage>( srcImage ), 5077 static_cast<VkImageLayout>( srcImageLayout ), 5078 
static_cast<VkImage>( dstImage ), 5079 static_cast<VkImageLayout>( dstImageLayout ), 5080 regionCount, 5081 reinterpret_cast<const VkImageCopy *>( pRegions ) ); 5082 } 5083 5084 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5085 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,Dispatch const & d) const5086 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5087 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5088 VULKAN_HPP_NAMESPACE::Image dstImage, 5089 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5090 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, 5091 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5092 { 5093 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5094 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5095 VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" ); 5096 # endif 5097 5098 d.vkCmdCopyImage( m_commandBuffer, 5099 static_cast<VkImage>( srcImage ), 5100 static_cast<VkImageLayout>( srcImageLayout ), 5101 static_cast<VkImage>( dstImage ), 5102 static_cast<VkImageLayout>( dstImageLayout ), 5103 regions.size(), 5104 reinterpret_cast<const VkImageCopy *>( regions.data() ) ); 5105 } 5106 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5107 5108 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const5109 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 
5110 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5111 VULKAN_HPP_NAMESPACE::Image dstImage, 5112 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5113 uint32_t regionCount, 5114 const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, 5115 VULKAN_HPP_NAMESPACE::Filter filter, 5116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5117 { 5118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5119 d.vkCmdBlitImage( m_commandBuffer, 5120 static_cast<VkImage>( srcImage ), 5121 static_cast<VkImageLayout>( srcImageLayout ), 5122 static_cast<VkImage>( dstImage ), 5123 static_cast<VkImageLayout>( dstImageLayout ), 5124 regionCount, 5125 reinterpret_cast<const VkImageBlit *>( pRegions ), 5126 static_cast<VkFilter>( filter ) ); 5127 } 5128 5129 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5130 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const5131 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5132 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5133 VULKAN_HPP_NAMESPACE::Image dstImage, 5134 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5135 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, 5136 VULKAN_HPP_NAMESPACE::Filter filter, 5137 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5138 { 5139 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5140 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5141 VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" ); 5142 # endif 5143 5144 d.vkCmdBlitImage( m_commandBuffer, 5145 static_cast<VkImage>( srcImage ), 5146 static_cast<VkImageLayout>( srcImageLayout ), 5147 
static_cast<VkImage>( dstImage ), 5148 static_cast<VkImageLayout>( dstImageLayout ), 5149 regions.size(), 5150 reinterpret_cast<const VkImageBlit *>( regions.data() ), 5151 static_cast<VkFilter>( filter ) ); 5152 } 5153 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5154 5155 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const5156 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5157 VULKAN_HPP_NAMESPACE::Image dstImage, 5158 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5159 uint32_t regionCount, 5160 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 5161 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5162 { 5163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5164 d.vkCmdCopyBufferToImage( m_commandBuffer, 5165 static_cast<VkBuffer>( srcBuffer ), 5166 static_cast<VkImage>( dstImage ), 5167 static_cast<VkImageLayout>( dstImageLayout ), 5168 regionCount, 5169 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 5170 } 5171 5172 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5173 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const5174 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 5175 VULKAN_HPP_NAMESPACE::Image dstImage, 5176 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5177 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 5178 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5179 { 5180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 5181 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5182 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" ); 5183 # endif 5184 5185 d.vkCmdCopyBufferToImage( m_commandBuffer, 5186 static_cast<VkBuffer>( srcBuffer ), 5187 static_cast<VkImage>( dstImage ), 5188 static_cast<VkImageLayout>( dstImageLayout ), 5189 regions.size(), 5190 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 5191 } 5192 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5193 5194 template <typename Dispatch> copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const5195 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 5196 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5197 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5198 uint32_t regionCount, 5199 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 5200 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5201 { 5202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5203 d.vkCmdCopyImageToBuffer( m_commandBuffer, 5204 static_cast<VkImage>( srcImage ), 5205 static_cast<VkImageLayout>( srcImageLayout ), 5206 static_cast<VkBuffer>( dstBuffer ), 5207 regionCount, 5208 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 5209 } 5210 5211 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5212 template <typename Dispatch> copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const5213 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 5214 VULKAN_HPP_NAMESPACE::ImageLayout 
srcImageLayout, 5215 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5216 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 5217 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5218 { 5219 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5220 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5221 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" ); 5222 # endif 5223 5224 d.vkCmdCopyImageToBuffer( m_commandBuffer, 5225 static_cast<VkImage>( srcImage ), 5226 static_cast<VkImageLayout>( srcImageLayout ), 5227 static_cast<VkBuffer>( dstBuffer ), 5228 regions.size(), 5229 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 5230 } 5231 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5232 5233 template <typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize dataSize,const void * pData,Dispatch const & d) const5234 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5235 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5236 VULKAN_HPP_NAMESPACE::DeviceSize dataSize, 5237 const void * pData, 5238 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5239 { 5240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5241 d.vkCmdUpdateBuffer( 5242 m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData ); 5243 } 5244 5245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5246 template <typename DataType, typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,Dispatch const & d) const5247 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5248 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5249 
VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, 5250 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5251 { 5252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5254 VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" ); 5255 # endif 5256 5257 d.vkCmdUpdateBuffer( m_commandBuffer, 5258 static_cast<VkBuffer>( dstBuffer ), 5259 static_cast<VkDeviceSize>( dstOffset ), 5260 data.size() * sizeof( DataType ), 5261 reinterpret_cast<const void *>( data.data() ) ); 5262 } 5263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5264 5265 template <typename Dispatch> fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize size,uint32_t data,Dispatch const & d) const5266 VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5267 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5268 VULKAN_HPP_NAMESPACE::DeviceSize size, 5269 uint32_t data, 5270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5271 { 5272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5273 d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data ); 5274 } 5275 5276 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const5277 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5278 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5279 const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, 5280 uint32_t rangeCount, 5281 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5282 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 5283 { 5284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5285 d.vkCmdClearColorImage( m_commandBuffer, 5286 static_cast<VkImage>( image ), 5287 static_cast<VkImageLayout>( imageLayout ), 5288 reinterpret_cast<const VkClearColorValue *>( pColor ), 5289 rangeCount, 5290 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5291 } 5292 5293 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5294 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue & color,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5295 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 5296 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5297 const VULKAN_HPP_NAMESPACE::ClearColorValue & color, 5298 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5299 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5300 { 5301 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5302 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5303 VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" ); 5304 # endif 5305 5306 d.vkCmdClearColorImage( m_commandBuffer, 5307 static_cast<VkImage>( image ), 5308 static_cast<VkImageLayout>( imageLayout ), 5309 reinterpret_cast<const VkClearColorValue *>( &color ), 5310 ranges.size(), 5311 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5312 } 5313 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5314 5315 template <typename Dispatch> clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * 
pRanges,Dispatch const & d) const5316 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5317 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5318 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, 5319 uint32_t rangeCount, 5320 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 5321 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5322 { 5323 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5324 d.vkCmdClearDepthStencilImage( m_commandBuffer, 5325 static_cast<VkImage>( image ), 5326 static_cast<VkImageLayout>( imageLayout ), 5327 reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), 5328 rangeCount, 5329 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 5330 } 5331 5332 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5333 template <typename Dispatch> 5334 VULKAN_HPP_INLINE void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const5335 CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 5336 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 5337 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, 5338 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 5339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5340 { 5341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5342 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5343 VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" ); 5344 # endif 5345 5346 d.vkCmdClearDepthStencilImage( m_commandBuffer, 5347 static_cast<VkImage>( image ), 5348 static_cast<VkImageLayout>( imageLayout ), 5349 reinterpret_cast<const 
VkClearDepthStencilValue *>( &depthStencil ), 5350 ranges.size(), 5351 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 5352 } 5353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5354 5355 template <typename Dispatch> clearAttachments(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,uint32_t rectCount,const VULKAN_HPP_NAMESPACE::ClearRect * pRects,Dispatch const & d) const5356 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, 5357 const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, 5358 uint32_t rectCount, 5359 const VULKAN_HPP_NAMESPACE::ClearRect * pRects, 5360 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5361 { 5362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5363 d.vkCmdClearAttachments( m_commandBuffer, 5364 attachmentCount, 5365 reinterpret_cast<const VkClearAttachment *>( pAttachments ), 5366 rectCount, 5367 reinterpret_cast<const VkClearRect *>( pRects ) ); 5368 } 5369 5370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5371 template <typename Dispatch> clearAttachments(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,Dispatch const & d) const5372 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, 5373 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, 5374 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5375 { 5376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5378 VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" ); 5379 # endif 5380 5381 d.vkCmdClearAttachments( m_commandBuffer, 5382 attachments.size(), 5383 reinterpret_cast<const 
VkClearAttachment *>( attachments.data() ), 5384 rects.size(), 5385 reinterpret_cast<const VkClearRect *>( rects.data() ) ); 5386 } 5387 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5388 5389 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,Dispatch const & d) const5390 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5391 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5392 VULKAN_HPP_NAMESPACE::Image dstImage, 5393 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5394 uint32_t regionCount, 5395 const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, 5396 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5397 { 5398 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5399 d.vkCmdResolveImage( m_commandBuffer, 5400 static_cast<VkImage>( srcImage ), 5401 static_cast<VkImageLayout>( srcImageLayout ), 5402 static_cast<VkImage>( dstImage ), 5403 static_cast<VkImageLayout>( dstImageLayout ), 5404 regionCount, 5405 reinterpret_cast<const VkImageResolve *>( pRegions ) ); 5406 } 5407 5408 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5409 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,Dispatch const & d) const5410 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 5411 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 5412 VULKAN_HPP_NAMESPACE::Image dstImage, 5413 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 5414 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & 
regions, 5415 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5416 { 5417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5418 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5419 VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" ); 5420 # endif 5421 5422 d.vkCmdResolveImage( m_commandBuffer, 5423 static_cast<VkImage>( srcImage ), 5424 static_cast<VkImageLayout>( srcImageLayout ), 5425 static_cast<VkImage>( dstImage ), 5426 static_cast<VkImageLayout>( dstImageLayout ), 5427 regions.size(), 5428 reinterpret_cast<const VkImageResolve *>( regions.data() ) ); 5429 } 5430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5431 5432 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const5433 VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, 5434 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 5435 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5436 { 5437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5438 d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 5439 } 5440 5441 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const5442 VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 5443 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 5444 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5445 { 5446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5447 d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 5448 } 5449 5450 template <typename Dispatch> waitEvents(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,VULKAN_HPP_NAMESPACE::PipelineStageFlags 
srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const5451 VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, 5452 const VULKAN_HPP_NAMESPACE::Event * pEvents, 5453 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5454 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5455 uint32_t memoryBarrierCount, 5456 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 5457 uint32_t bufferMemoryBarrierCount, 5458 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 5459 uint32_t imageMemoryBarrierCount, 5460 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 5461 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5462 { 5463 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5464 d.vkCmdWaitEvents( m_commandBuffer, 5465 eventCount, 5466 reinterpret_cast<const VkEvent *>( pEvents ), 5467 static_cast<VkPipelineStageFlags>( srcStageMask ), 5468 static_cast<VkPipelineStageFlags>( dstStageMask ), 5469 memoryBarrierCount, 5470 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 5471 bufferMemoryBarrierCount, 5472 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 5473 imageMemoryBarrierCount, 5474 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 5475 } 5476 5477 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5478 template <typename Dispatch> 5479 VULKAN_HPP_INLINE void waitEvents(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const5480 CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 5481 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5482 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5483 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 5484 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 5485 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 5486 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5487 { 5488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5489 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5490 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" ); 5491 # endif 5492 5493 d.vkCmdWaitEvents( m_commandBuffer, 5494 events.size(), 5495 reinterpret_cast<const VkEvent *>( events.data() ), 5496 static_cast<VkPipelineStageFlags>( srcStageMask ), 5497 static_cast<VkPipelineStageFlags>( dstStageMask ), 5498 memoryBarriers.size(), 5499 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 5500 bufferMemoryBarriers.size(), 5501 reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 5502 imageMemoryBarriers.size(), 5503 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 5504 } 5505 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5506 5507 template <typename Dispatch> pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags 
dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const5508 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5509 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5510 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 5511 uint32_t memoryBarrierCount, 5512 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 5513 uint32_t bufferMemoryBarrierCount, 5514 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 5515 uint32_t imageMemoryBarrierCount, 5516 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 5517 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5518 { 5519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5520 d.vkCmdPipelineBarrier( m_commandBuffer, 5521 static_cast<VkPipelineStageFlags>( srcStageMask ), 5522 static_cast<VkPipelineStageFlags>( dstStageMask ), 5523 static_cast<VkDependencyFlags>( dependencyFlags ), 5524 memoryBarrierCount, 5525 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 5526 bufferMemoryBarrierCount, 5527 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 5528 imageMemoryBarrierCount, 5529 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 5530 } 5531 5532 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5533 template <typename Dispatch> 5534 VULKAN_HPP_INLINE void pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & 
memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const5535 CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5536 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5537 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 5538 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 5539 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 5540 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 5541 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5542 { 5543 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5544 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5545 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" ); 5546 # endif 5547 5548 d.vkCmdPipelineBarrier( m_commandBuffer, 5549 static_cast<VkPipelineStageFlags>( srcStageMask ), 5550 static_cast<VkPipelineStageFlags>( dstStageMask ), 5551 static_cast<VkDependencyFlags>( dependencyFlags ), 5552 memoryBarriers.size(), 5553 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 5554 bufferMemoryBarriers.size(), 5555 reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 5556 imageMemoryBarriers.size(), 5557 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 5558 } 5559 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5560 5561 template <typename Dispatch> beginQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,Dispatch const & d) const5562 VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool 
queryPool, 5563 uint32_t query, 5564 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 5565 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5566 { 5567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5568 d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); 5569 } 5570 5571 template <typename Dispatch> endQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5572 VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5573 { 5574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5575 d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query ); 5576 } 5577 5578 template <typename Dispatch> resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const5579 VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5580 uint32_t firstQuery, 5581 uint32_t queryCount, 5582 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5583 { 5584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5585 d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 5586 } 5587 5588 template <typename Dispatch> writeTimestamp(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5589 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 5590 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5591 uint32_t query, 5592 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5593 { 5594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5595 d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), 
static_cast<VkQueryPool>( queryPool ), query ); 5596 } 5597 5598 template <typename Dispatch> copyQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const5599 VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5600 uint32_t firstQuery, 5601 uint32_t queryCount, 5602 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5603 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5604 VULKAN_HPP_NAMESPACE::DeviceSize stride, 5605 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 5606 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5607 { 5608 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5609 d.vkCmdCopyQueryPoolResults( m_commandBuffer, 5610 static_cast<VkQueryPool>( queryPool ), 5611 firstQuery, 5612 queryCount, 5613 static_cast<VkBuffer>( dstBuffer ), 5614 static_cast<VkDeviceSize>( dstOffset ), 5615 static_cast<VkDeviceSize>( stride ), 5616 static_cast<VkQueryResultFlags>( flags ) ); 5617 } 5618 5619 template <typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues,Dispatch const & d) const5620 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5621 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5622 uint32_t offset, 5623 uint32_t size, 5624 const void * pValues, 5625 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5626 { 5627 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5628 d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues ); 5629 } 5630 5631 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5632 template 
<typename ValuesType, typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,Dispatch const & d) const5633 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5634 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5635 uint32_t offset, 5636 VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, 5637 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5638 { 5639 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5640 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5641 VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" ); 5642 # endif 5643 5644 d.vkCmdPushConstants( m_commandBuffer, 5645 static_cast<VkPipelineLayout>( layout ), 5646 static_cast<VkShaderStageFlags>( stageFlags ), 5647 offset, 5648 values.size() * sizeof( ValuesType ), 5649 reinterpret_cast<const void *>( values.data() ) ); 5650 } 5651 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5652 5653 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5654 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 5655 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5656 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5657 { 5658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5659 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5660 } 5661 5662 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5663 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & 
renderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5664 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 5665 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5666 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5667 { 5668 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5669 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5670 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" ); 5671 # endif 5672 5673 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5674 } 5675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5676 5677 template <typename Dispatch> nextSubpass(VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5678 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5679 { 5680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5681 d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) ); 5682 } 5683 5684 template <typename Dispatch> endRenderPass(Dispatch const & d) const5685 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5686 { 5687 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5688 d.vkCmdEndRenderPass( m_commandBuffer ); 5689 } 5690 5691 template <typename Dispatch> executeCommands(uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const5692 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, 5693 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 5694 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5695 { 5696 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 5697 d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 5698 } 5699 5700 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5701 template <typename Dispatch> executeCommands(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const5702 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 5703 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5704 { 5705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5707 VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" ); 5708 # endif 5709 5710 d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 5711 } 5712 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5713 5714 //=== VK_VERSION_1_1 === 5715 5716 template <typename Dispatch> enumerateInstanceVersion(uint32_t * pApiVersion,Dispatch const & d)5717 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT 5718 { 5719 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5720 return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) ); 5721 } 5722 5723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5724 template <typename Dispatch> enumerateInstanceVersion(Dispatch const & d)5725 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d ) 5726 { 5727 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5728 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5729 VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires 
<VK_VERSION_1_1>" ); 5730 # endif 5731 5732 uint32_t apiVersion; 5733 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceVersion( &apiVersion ) ); 5734 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); 5735 5736 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); 5737 } 5738 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5739 5740 template <typename Dispatch> bindBufferMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const5741 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, 5742 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 5743 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5744 { 5745 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5746 return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 5747 } 5748 5749 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5750 template <typename Dispatch> 5751 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const5752 Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const 5753 { 5754 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5755 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5756 VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 5757 # endif 5758 5759 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5760 d.vkBindBufferMemory2( m_device, 
bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 5761 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); 5762 5763 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 5764 } 5765 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5766 5767 template <typename Dispatch> bindImageMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const5768 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, 5769 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 5770 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5771 { 5772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5773 return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 5774 } 5775 5776 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5777 template <typename Dispatch> 5778 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const5779 Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 5780 { 5781 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5782 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5783 VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 5784 # endif 5785 5786 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5787 d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 5788 VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); 5789 5790 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 5791 } 5792 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5793 5794 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const5795 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, 5796 uint32_t localDeviceIndex, 5797 uint32_t remoteDeviceIndex, 5798 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 5799 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5800 { 5801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5802 d.vkGetDeviceGroupPeerMemoryFeatures( 5803 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 5804 } 5805 5806 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5807 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const5808 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( 5809 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5810 { 5811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5812 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5813 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures && 5814 "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); 5815 # endif 5816 5817 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 5818 d.vkGetDeviceGroupPeerMemoryFeatures( 5819 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) 
); 5820 5821 return peerMemoryFeatures; 5822 } 5823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5824 5825 template <typename Dispatch> setDeviceMask(uint32_t deviceMask,Dispatch const & d) const5826 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5827 { 5828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5829 d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); 5830 } 5831 5832 template <typename Dispatch> dispatchBase(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const5833 VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, 5834 uint32_t baseGroupY, 5835 uint32_t baseGroupZ, 5836 uint32_t groupCountX, 5837 uint32_t groupCountY, 5838 uint32_t groupCountZ, 5839 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5840 { 5841 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5842 d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 5843 } 5844 5845 template <typename Dispatch> 5846 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroups(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const5847 Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, 5848 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 5849 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5850 { 5851 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5852 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( 5853 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 5854 } 5855 5856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5857 template <typename 
PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 5858 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5859 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(Dispatch const & d) const5860 Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const 5861 { 5862 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5863 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5864 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && 5865 "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 5866 # endif 5867 5868 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 5869 uint32_t physicalDeviceGroupCount; 5870 VULKAN_HPP_NAMESPACE::Result result; 5871 do 5872 { 5873 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5874 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5875 { 5876 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5877 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5878 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 5879 } 5880 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5881 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5882 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5883 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5884 { 5885 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5886 } 5887 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 5888 } 5889 5890 template <typename PhysicalDeviceGroupPropertiesAllocator, 5891 typename Dispatch, 5892 typename std::enable_if< 5893 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 5894 int>::type> 5895 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5896 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const5897 Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 5898 { 5899 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5900 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5901 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && 5902 "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 5903 # endif 5904 5905 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 5906 physicalDeviceGroupPropertiesAllocator ); 5907 uint32_t physicalDeviceGroupCount; 5908 VULKAN_HPP_NAMESPACE::Result result; 5909 do 5910 { 5911 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5912 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5913 { 5914 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5915 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5916 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( 
physicalDeviceGroupProperties.data() ) ) ); 5917 } 5918 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5919 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5920 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5921 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5922 { 5923 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5924 } 5925 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 5926 } 5927 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5928 5929 template <typename Dispatch> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5930 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 5931 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5932 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5933 { 5934 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5935 d.vkGetImageMemoryRequirements2( 5936 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5937 } 5938 5939 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5940 template <typename Dispatch> 5941 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5942 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5943 { 5944 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5945 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) 5946 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && 5947 "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5948 # endif 5949 5950 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 5951 d.vkGetImageMemoryRequirements2( 5952 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5953 5954 return memoryRequirements; 5955 } 5956 5957 template <typename X, typename Y, typename... Z, typename Dispatch> 5958 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5959 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5960 { 5961 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5962 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5963 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && 5964 "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5965 # endif 5966 5967 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5968 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 5969 d.vkGetImageMemoryRequirements2( 5970 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5971 5972 return structureChain; 5973 } 5974 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5975 5976 template <typename Dispatch> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5977 VULKAN_HPP_INLINE 
void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 5978 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5979 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5980 { 5981 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5982 d.vkGetBufferMemoryRequirements2( 5983 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5984 } 5985 5986 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5987 template <typename Dispatch> 5988 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const5989 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5990 { 5991 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5992 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 5993 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && 5994 "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 5995 # endif 5996 5997 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 5998 d.vkGetBufferMemoryRequirements2( 5999 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 6000 6001 return memoryRequirements; 6002 } 6003 6004 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6005 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const6006 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6007 { 6008 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6009 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6010 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && 6011 "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6012 # endif 6013 6014 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6015 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 6016 d.vkGetBufferMemoryRequirements2( 6017 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 6018 6019 return structureChain; 6020 } 6021 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6022 6023 template <typename Dispatch> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const6024 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 6025 uint32_t * pSparseMemoryRequirementCount, 6026 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 6027 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6028 { 6029 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6030 d.vkGetImageSparseMemoryRequirements2( m_device, 6031 reinterpret_cast<const 
VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 6032 pSparseMemoryRequirementCount, 6033 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 6034 } 6035 6036 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6037 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 6038 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const6039 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 6040 { 6041 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6042 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6043 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6044 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6045 # endif 6046 6047 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 6048 uint32_t sparseMemoryRequirementCount; 6049 d.vkGetImageSparseMemoryRequirements2( 6050 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6051 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6052 d.vkGetImageSparseMemoryRequirements2( m_device, 6053 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6054 &sparseMemoryRequirementCount, 6055 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 6056 6057 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6058 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6059 { 6060 sparseMemoryRequirements.resize( 
sparseMemoryRequirementCount ); 6061 } 6062 return sparseMemoryRequirements; 6063 } 6064 6065 template <typename SparseImageMemoryRequirements2Allocator, 6066 typename Dispatch, 6067 typename std::enable_if< 6068 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 6069 int>::type> 6070 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const6071 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 6072 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 6073 Dispatch const & d ) const 6074 { 6075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6076 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6077 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && 6078 "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 6079 # endif 6080 6081 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 6082 sparseImageMemoryRequirements2Allocator ); 6083 uint32_t sparseMemoryRequirementCount; 6084 d.vkGetImageSparseMemoryRequirements2( 6085 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 6086 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6087 d.vkGetImageSparseMemoryRequirements2( m_device, 6088 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 6089 &sparseMemoryRequirementCount, 6090 reinterpret_cast<VkSparseImageMemoryRequirements2 
*>( sparseMemoryRequirements.data() ) ); 6091 6092 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 6093 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 6094 { 6095 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 6096 } 6097 return sparseMemoryRequirements; 6098 } 6099 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6100 6101 template <typename Dispatch> getFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const6102 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6103 { 6104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6105 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 6106 } 6107 6108 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6109 template <typename Dispatch> 6110 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const & d) const6111 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6112 { 6113 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6114 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6115 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6116 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6117 # endif 6118 6119 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 6120 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6121 6122 return features; 6123 } 6124 6125 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6126 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2(Dispatch const & d) const6127 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6128 { 6129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6131 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && 6132 "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6133 # endif 6134 6135 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6136 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 6137 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 6138 6139 return structureChain; 6140 } 6141 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6142 6143 template <typename Dispatch> getProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const6144 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 6145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6146 { 6147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6148 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 6149 } 6150 6151 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6152 template <typename Dispatch> 6153 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const & d) const6154 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6155 { 6156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6157 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6158 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 
6159 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6160 # endif 6161 6162 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 6163 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6164 6165 return properties; 6166 } 6167 6168 template <typename X, typename Y, typename... Z, typename Dispatch> 6169 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2(Dispatch const & d) const6170 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6171 { 6172 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6173 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6174 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && 6175 "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6176 # endif 6177 6178 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6179 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 6180 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 6181 6182 return structureChain; 6183 } 6184 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6185 6186 template <typename Dispatch> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const6187 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, 6188 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 6189 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6190 { 6191 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6192 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), 
reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 6193 } 6194 6195 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6196 template <typename Dispatch> 6197 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6198 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6199 { 6200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6201 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6202 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6203 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6204 # endif 6205 6206 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 6207 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6208 6209 return formatProperties; 6210 } 6211 6212 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6213 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const6214 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6215 { 6216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6218 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && 6219 "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6220 # endif 6221 6222 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6223 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 6224 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 6225 6226 return structureChain; 6227 } 6228 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6229 6230 template <typename Dispatch> 6231 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const6232 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 6233 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 6234 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6235 { 6236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6237 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6238 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 6239 reinterpret_cast<VkImageFormatProperties2 *>( 
pImageFormatProperties ) ) ); 6240 } 6241 6242 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6243 template <typename Dispatch> 6244 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6245 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6246 { 6247 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6248 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6249 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6250 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6251 # endif 6252 6253 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 6254 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6255 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6256 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6257 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6258 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6259 6260 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 6261 } 6262 6263 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6264 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const6265 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 6266 { 6267 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6268 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6269 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && 6270 "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6271 # endif 6272 6273 StructureChain<X, Y, Z...> structureChain; 6274 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 6275 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6276 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 6277 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 6278 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 6279 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 6280 6281 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 6282 } 6283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6284 6285 template <typename Dispatch> getQueueFamilyProperties2(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const6286 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, 6287 VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 6288 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 6289 { 6290 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6291 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6292 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 6293 } 6294 6295 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6296 template <typename QueueFamilyProperties2Allocator, typename Dispatch> 6297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(Dispatch const & d) const6298 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 6299 { 6300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6301 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6302 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6303 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6304 # endif 6305 6306 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 6307 uint32_t queueFamilyPropertyCount; 6308 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6309 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6310 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6311 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6312 6313 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6314 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6315 { 6316 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6317 } 6318 return queueFamilyProperties; 6319 } 6320 6321 template < 6322 typename QueueFamilyProperties2Allocator, 6323 typename Dispatch, 6324 typename std::enable_if<std::is_same<typename 
QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 6325 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const6326 PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 6327 { 6328 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6329 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6330 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6331 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6332 # endif 6333 6334 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 6335 uint32_t queueFamilyPropertyCount; 6336 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6337 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6338 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6339 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6340 6341 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6342 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6343 { 6344 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6345 } 6346 return queueFamilyProperties; 6347 } 6348 6349 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 6350 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(Dispatch const & d) const6351 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 
6352 { 6353 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6354 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6355 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6356 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6357 # endif 6358 6359 std::vector<StructureChain, StructureChainAllocator> structureChains; 6360 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 6361 uint32_t queueFamilyPropertyCount; 6362 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6363 structureChains.resize( queueFamilyPropertyCount ); 6364 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6365 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6366 { 6367 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 6368 } 6369 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6370 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6371 6372 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6373 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6374 { 6375 structureChains.resize( queueFamilyPropertyCount ); 6376 } 6377 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6378 { 6379 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 6380 } 6381 return structureChains; 6382 } 6383 6384 template <typename StructureChain, 6385 typename StructureChainAllocator, 6386 typename Dispatch, 6387 typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 6388 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(StructureChainAllocator & 
structureChainAllocator,Dispatch const & d) const6389 PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 6390 { 6391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6392 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6393 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && 6394 "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6395 # endif 6396 6397 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 6398 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 6399 uint32_t queueFamilyPropertyCount; 6400 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 6401 structureChains.resize( queueFamilyPropertyCount ); 6402 queueFamilyProperties.resize( queueFamilyPropertyCount ); 6403 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6404 { 6405 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 6406 } 6407 d.vkGetPhysicalDeviceQueueFamilyProperties2( 6408 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 6409 6410 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 6411 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 6412 { 6413 structureChains.resize( queueFamilyPropertyCount ); 6414 } 6415 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 6416 { 6417 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 6418 } 6419 return structureChains; 6420 } 6421 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6422 6423 template <typename Dispatch> getMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * 
pMemoryProperties,Dispatch const & d) const6424 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 6425 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6426 { 6427 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6428 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 6429 } 6430 6431 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6432 template <typename Dispatch> 6433 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const & d) const6434 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6435 { 6436 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6437 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6438 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && 6439 "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6440 # endif 6441 6442 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 6443 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 6444 6445 return memoryProperties; 6446 } 6447 6448 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6449 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const & d) const6450 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6451 { 6452 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6453 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6454 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && 6455 "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6456 # endif 6457 6458 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6459 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 6460 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 6461 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 6462 6463 return structureChain; 6464 } 6465 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6466 6467 template <typename Dispatch> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const6468 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 6469 uint32_t * pPropertyCount, 6470 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 6471 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6472 { 6473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6474 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 6475 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 6476 pPropertyCount, 6477 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 6478 } 6479 6480 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6481 template <typename SparseImageFormatProperties2Allocator, typename Dispatch> 6482 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const6483 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const 6484 { 6485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6486 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6487 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && 6488 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6489 # endif 6490 6491 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 6492 uint32_t propertyCount; 6493 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 6494 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 6495 properties.resize( propertyCount ); 6496 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 6497 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 6498 &propertyCount, 6499 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 6500 6501 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 6502 if ( propertyCount < properties.size() ) 6503 { 6504 properties.resize( propertyCount ); 6505 } 6506 return properties; 6507 } 6508 6509 template < 6510 typename SparseImageFormatProperties2Allocator, 6511 typename Dispatch, 6512 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, 
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 6513 int>::type> 6514 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const6515 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 6516 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 6517 Dispatch const & d ) const 6518 { 6519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6520 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6521 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && 6522 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 6523 # endif 6524 6525 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 6526 uint32_t propertyCount; 6527 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 6528 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 6529 properties.resize( propertyCount ); 6530 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 6531 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 6532 &propertyCount, 6533 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 6534 6535 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 6536 if ( propertyCount < properties.size() ) 6537 { 6538 properties.resize( propertyCount ); 6539 } 6540 return properties; 6541 } 6542 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6543 6544 template 
<typename Dispatch> trimCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const6545 VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 6546 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 6547 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6548 { 6549 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6550 d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 6551 } 6552 6553 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const6554 VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, 6555 VULKAN_HPP_NAMESPACE::Queue * pQueue, 6556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6557 { 6558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6559 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); 6560 } 6561 6562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6563 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,Dispatch const & d) const6564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, 6565 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6566 { 6567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6568 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6569 VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function <vkGetDeviceQueue2> requires <VK_VERSION_1_1>" ); 6570 # endif 6571 6572 VULKAN_HPP_NAMESPACE::Queue queue; 6573 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) ); 6574 6575 return 
queue; 6576 } 6577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6578 6579 template <typename Dispatch> 6580 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,Dispatch const & d) const6581 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, 6582 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6583 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, 6584 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6585 { 6586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6587 return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, 6588 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), 6589 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6590 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); 6591 } 6592 6593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6594 template <typename Dispatch> 6595 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6596 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 6597 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6598 Dispatch const & d ) const 6599 { 6600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6601 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6602 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && 6603 "Function <vkCreateSamplerYcbcrConversion> requires 
<VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6604 # endif 6605 6606 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 6607 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 6608 m_device, 6609 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 6610 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6611 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 6612 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); 6613 6614 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); 6615 } 6616 6617 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6618 template <typename Dispatch> 6619 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6620 Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 6621 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6622 Dispatch const & d ) const 6623 { 6624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6626 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && 6627 "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6628 # endif 6629 6630 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 6631 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 6632 m_device, 
6633 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 6634 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6635 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 6636 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); 6637 6638 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 6639 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6640 } 6641 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6642 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6643 6644 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6645 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6646 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6647 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6648 { 6649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6650 d.vkDestroySamplerYcbcrConversion( 6651 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6652 } 6653 6654 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6655 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6656 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6657 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6658 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 6659 { 6660 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6661 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6662 VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && 6663 "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6664 # endif 6665 6666 d.vkDestroySamplerYcbcrConversion( 6667 m_device, 6668 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6669 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6670 } 6671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6672 6673 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6674 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6675 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6676 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6677 { 6678 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6679 d.vkDestroySamplerYcbcrConversion( 6680 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6681 } 6682 6683 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6684 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6685 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6686 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6688 { 6689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6690 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6691 VULKAN_HPP_ASSERT( 
d.vkDestroySamplerYcbcrConversion && 6692 "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 6693 # endif 6694 6695 d.vkDestroySamplerYcbcrConversion( 6696 m_device, 6697 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6698 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6699 } 6700 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6701 6702 template <typename Dispatch> 6703 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const6704 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 6705 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6706 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 6707 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6708 { 6709 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6710 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, 6711 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 6712 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6713 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 6714 } 6715 6716 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6717 template <typename Dispatch> 6718 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6719 
Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6720 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6721 Dispatch const & d ) const 6722 { 6723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6724 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6725 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && 6726 "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6727 # endif 6728 6729 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6730 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6731 m_device, 6732 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6733 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6734 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6735 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); 6736 6737 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); 6738 } 6739 6740 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6741 template <typename Dispatch> 6742 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6743 Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6744 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6745 Dispatch const & d ) const 6746 
{ 6747 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6748 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6749 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && 6750 "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6751 # endif 6752 6753 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6754 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6755 m_device, 6756 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6757 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6758 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6759 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); 6760 6761 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 6762 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 6763 descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6764 } 6765 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6766 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6767 6768 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6769 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6770 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6771 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6772 { 6773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6774 d.vkDestroyDescriptorUpdateTemplate( 6775 m_device, static_cast<VkDescriptorUpdateTemplate>( 
descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6776 } 6777 6778 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6779 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6780 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6781 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6782 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6783 { 6784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6785 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6786 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && 6787 "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6788 # endif 6789 6790 d.vkDestroyDescriptorUpdateTemplate( 6791 m_device, 6792 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6793 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6794 } 6795 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6796 6797 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6798 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6799 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6800 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6801 { 6802 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6803 d.vkDestroyDescriptorUpdateTemplate( 6804 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks 
*>( pAllocator ) ); 6805 } 6806 6807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6808 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6809 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6810 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6812 { 6813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6815 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && 6816 "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6817 # endif 6818 6819 d.vkDestroyDescriptorUpdateTemplate( 6820 m_device, 6821 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6822 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6823 } 6824 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6825 6826 template <typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const6827 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6828 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6829 const void * pData, 6830 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6831 { 6832 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6833 d.vkUpdateDescriptorSetWithTemplate( 6834 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 6835 } 6836 6837 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 6838 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const6839 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6840 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6841 DataType const & data, 6842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6843 { 6844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6845 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6846 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && 6847 "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 6848 # endif 6849 6850 d.vkUpdateDescriptorSetWithTemplate( m_device, 6851 static_cast<VkDescriptorSet>( descriptorSet ), 6852 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6853 reinterpret_cast<const void *>( &data ) ); 6854 } 6855 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6856 6857 template <typename Dispatch> getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const6858 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 6859 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 6860 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6861 { 6862 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6863 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6864 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 6865 
reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 6866 } 6867 6868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6869 template <typename Dispatch> 6870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const6871 PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 6872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6873 { 6874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6875 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6876 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && 6877 "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 6878 # endif 6879 6880 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 6881 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6882 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 6883 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 6884 6885 return externalBufferProperties; 6886 } 6887 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6888 6889 template <typename Dispatch> getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const6890 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 6891 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 6892 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6893 { 6894 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6895 
d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6896 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 6897 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 6898 } 6899 6900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6901 template <typename Dispatch> 6902 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const6903 PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 6904 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6905 { 6906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6907 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6908 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && 6909 "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 6910 # endif 6911 6912 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 6913 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6914 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 6915 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 6916 6917 return externalFenceProperties; 6918 } 6919 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6920 6921 template <typename Dispatch> 6922 VULKAN_HPP_INLINE void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const6923 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 6924 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * 
pExternalSemaphoreProperties, 6925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6926 { 6927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6928 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6929 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 6930 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 6931 } 6932 6933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6934 template <typename Dispatch> 6935 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const6936 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 6937 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6938 { 6939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6940 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6941 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && 6942 "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 6943 # endif 6944 6945 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 6946 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6947 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 6948 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 6949 6950 return externalSemaphoreProperties; 6951 } 6952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6953 6954 template <typename Dispatch> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const6955 
VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 6956 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 6957 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6958 { 6959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6960 d.vkGetDescriptorSetLayoutSupport( 6961 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 6962 } 6963 6964 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6965 template <typename Dispatch> 6966 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6967 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6969 { 6970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6971 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6972 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 6973 # endif 6974 6975 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 6976 d.vkGetDescriptorSetLayoutSupport( 6977 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6978 6979 return support; 6980 } 6981 6982 template <typename X, typename Y, typename... 
Z, typename Dispatch> 6983 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6984 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6985 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6986 { 6987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6988 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 6989 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 6990 # endif 6991 6992 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6993 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 6994 d.vkGetDescriptorSetLayoutSupport( 6995 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6996 6997 return structureChain; 6998 } 6999 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7000 7001 //=== VK_VERSION_1_2 === 7002 7003 template <typename Dispatch> drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7004 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7005 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7006 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 7007 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7008 uint32_t maxDrawCount, 7009 uint32_t stride, 7010 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7011 { 7012 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7013 
d.vkCmdDrawIndirectCount( m_commandBuffer, 7014 static_cast<VkBuffer>( buffer ), 7015 static_cast<VkDeviceSize>( offset ), 7016 static_cast<VkBuffer>( countBuffer ), 7017 static_cast<VkDeviceSize>( countBufferOffset ), 7018 maxDrawCount, 7019 stride ); 7020 } 7021 7022 template <typename Dispatch> drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const7023 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 7024 VULKAN_HPP_NAMESPACE::DeviceSize offset, 7025 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 7026 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 7027 uint32_t maxDrawCount, 7028 uint32_t stride, 7029 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7030 { 7031 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7032 d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, 7033 static_cast<VkBuffer>( buffer ), 7034 static_cast<VkDeviceSize>( offset ), 7035 static_cast<VkBuffer>( countBuffer ), 7036 static_cast<VkDeviceSize>( countBufferOffset ), 7037 maxDrawCount, 7038 stride ); 7039 } 7040 7041 template <typename Dispatch> createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const7042 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 7043 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7044 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 7045 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7046 { 7047 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7048 return static_cast<Result>( d.vkCreateRenderPass2( m_device, 7049 
reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 7050 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7051 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 7052 } 7053 7054 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7055 template <typename Dispatch> 7056 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7057 Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7058 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7059 Dispatch const & d ) const 7060 { 7061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7063 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7064 # endif 7065 7066 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7067 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7068 d.vkCreateRenderPass2( m_device, 7069 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7070 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7071 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7072 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); 7073 7074 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 7075 } 7076 7077 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7078 template <typename Dispatch> 7079 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique(const 
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7080 Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 7081 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7082 Dispatch const & d ) const 7083 { 7084 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7085 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7086 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7087 # endif 7088 7089 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 7090 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7091 d.vkCreateRenderPass2( m_device, 7092 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 7093 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7094 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 7095 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); 7096 7097 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7098 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7099 } 7100 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 7101 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7102 7103 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const7104 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 7105 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7107 { 
7108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7109 d.vkCmdBeginRenderPass2( 7110 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 7111 } 7112 7113 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7114 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const7115 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 7116 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7117 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7118 { 7119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7120 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7121 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7122 # endif 7123 7124 d.vkCmdBeginRenderPass2( 7125 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 7126 } 7127 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7128 7129 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7130 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 7131 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7132 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7133 { 7134 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7135 d.vkCmdNextSubpass2( 7136 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo 
*>( pSubpassEndInfo ) ); 7137 } 7138 7139 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7140 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7141 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 7142 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7143 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7144 { 7145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7146 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7147 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7148 # endif 7149 7150 d.vkCmdNextSubpass2( 7151 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7152 } 7153 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7154 7155 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const7156 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 7157 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7158 { 7159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7160 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 7161 } 7162 7163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7164 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const7165 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 7166 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7167 { 7168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7169 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7170 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 7171 # endif 7172 7173 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 7174 } 7175 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7176 7177 template <typename Dispatch> 7178 VULKAN_HPP_INLINE void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const7179 Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7180 { 7181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7182 d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 7183 } 7184 7185 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const7186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7187 uint64_t * pValue, 7188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7189 { 7190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7191 return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); 7192 } 7193 7194 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7195 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const7196 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7197 Dispatch const & d ) const 7198 { 7199 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7200 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7201 VULKAN_HPP_ASSERT( 
d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7202 # endif 7203 7204 uint64_t value; 7205 VULKAN_HPP_NAMESPACE::Result result = 7206 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 7207 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); 7208 7209 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 7210 } 7211 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7212 7213 template <typename Dispatch> waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const7214 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 7215 uint64_t timeout, 7216 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7217 { 7218 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7219 return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 7220 } 7221 7222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7223 template <typename Dispatch> 7224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const7225 Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 7226 { 7227 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7228 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7229 VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7230 # endif 7231 7232 VULKAN_HPP_NAMESPACE::Result result = 7233 static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 7234 VULKAN_HPP_NAMESPACE::detail::resultCheck( 7235 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 7236 7237 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 7238 } 7239 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7240 7241 template <typename Dispatch> signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const7242 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 7243 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7244 { 7245 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7246 return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 7247 } 7248 7249 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7250 template <typename Dispatch> 7251 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const7252 Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 7253 { 7254 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7255 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7256 VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 7257 # endif 7258 7259 VULKAN_HPP_NAMESPACE::Result result = 7260 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 7261 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); 7262 7263 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7264 } 7265 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7266 7267 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7268 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7269 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7270 { 7271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7272 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 7273 } 7274 7275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7276 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7277 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7278 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7279 { 7280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7282 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress && 7283 "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7284 # endif 7285 7286 VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7287 7288 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 7289 } 7290 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7291 7292 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const7293 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 7294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
7295 { 7296 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7297 return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 7298 } 7299 7300 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7301 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const7302 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 7303 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7304 { 7305 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7306 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7307 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && 7308 "Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7309 # endif 7310 7311 uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 7312 7313 return result; 7314 } 7315 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7316 7317 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) const7318 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 7319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7320 { 7321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7322 return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 7323 } 7324 7325 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7326 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const7327 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( 
const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 7328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7329 { 7330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7332 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && 7333 "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 7334 # endif 7335 7336 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 7337 7338 return result; 7339 } 7340 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7341 7342 //=== VK_VERSION_1_3 === 7343 7344 template <typename Dispatch> getToolProperties(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const7345 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, 7346 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 7347 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7348 { 7349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7350 return static_cast<Result>( 7351 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 7352 } 7353 7354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7355 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 7356 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7357 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(Dispatch const & d) const7358 PhysicalDevice::getToolProperties( Dispatch const & d ) const 7359 { 7360 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7361 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7362 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceToolProperties && 7363 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7364 # endif 7365 7366 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 7367 uint32_t toolCount; 7368 VULKAN_HPP_NAMESPACE::Result result; 7369 do 7370 { 7371 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7372 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7373 { 7374 toolProperties.resize( toolCount ); 7375 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7376 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7377 } 7378 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7379 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 7380 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7381 if ( toolCount < toolProperties.size() ) 7382 { 7383 toolProperties.resize( toolCount ); 7384 } 7385 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7386 } 7387 7388 template < 7389 typename PhysicalDeviceToolPropertiesAllocator, 7390 typename Dispatch, 7391 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 7392 int>::type> 7393 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7394 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const7395 PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & 
physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 7396 { 7397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7398 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7399 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && 7400 "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 7401 # endif 7402 7403 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 7404 physicalDeviceToolPropertiesAllocator ); 7405 uint32_t toolCount; 7406 VULKAN_HPP_NAMESPACE::Result result; 7407 do 7408 { 7409 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 7410 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 7411 { 7412 toolProperties.resize( toolCount ); 7413 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7414 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 7415 } 7416 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7417 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 7418 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 7419 if ( toolCount < toolProperties.size() ) 7420 { 7421 toolProperties.resize( toolCount ); 7422 } 7423 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 7424 } 7425 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7426 7427 template <typename Dispatch> createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const7428 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 7429 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7430 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 7431 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7432 { 7433 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7434 return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, 7435 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 7436 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7437 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 7438 } 7439 7440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7441 template <typename Dispatch> 7442 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7443 Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7444 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7445 Dispatch const & d ) const 7446 { 7447 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7448 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7449 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7450 # endif 7451 7452 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7453 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7454 d.vkCreatePrivateDataSlot( m_device, 7455 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7456 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7457 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7458 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); 7459 7460 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); 7461 } 7462 7463 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7464 template <typename Dispatch> 7465 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7466 Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 7467 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7468 Dispatch const & d ) const 7469 { 7470 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7471 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7472 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7473 # endif 7474 7475 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 7476 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7477 d.vkCreatePrivateDataSlot( m_device, 7478 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 7479 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7480 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 7481 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); 7482 7483 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 7484 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7485 } 7486 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 7487 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7488 7489 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7490 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7491 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7492 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7493 { 7494 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7495 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7496 } 7497 7498 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7499 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7500 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7501 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7503 { 7504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7505 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7506 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7507 # endif 7508 7509 d.vkDestroyPrivateDataSlot( 7510 m_device, 7511 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7512 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7513 } 7514 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7515 7516 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
* pAllocator,Dispatch const & d) const7517 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7518 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7519 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7520 { 7521 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7522 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7523 } 7524 7525 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7526 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7527 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7528 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7529 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7530 { 7531 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7533 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7534 # endif 7535 7536 d.vkDestroyPrivateDataSlot( 7537 m_device, 7538 static_cast<VkPrivateDataSlot>( privateDataSlot ), 7539 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7540 } 7541 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7542 7543 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 7544 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7545 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7546 uint64_t objectHandle, 7547 
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7548 uint64_t data, 7549 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7550 { 7551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7552 return static_cast<Result>( 7553 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7554 } 7555 #else 7556 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const7557 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7558 uint64_t objectHandle, 7559 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7560 uint64_t data, 7561 Dispatch const & d ) const 7562 { 7563 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7564 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7565 VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7566 # endif 7567 7568 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7569 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 7570 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); 7571 7572 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7573 } 7574 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7575 7576 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const7577 VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7578 uint64_t objectHandle, 
7579 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7580 uint64_t * pData, 7581 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7582 { 7583 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7584 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 7585 } 7586 7587 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7588 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const7589 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 7590 uint64_t objectHandle, 7591 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 7592 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7593 { 7594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7595 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7596 VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 7597 # endif 7598 7599 uint64_t data; 7600 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 7601 7602 return data; 7603 } 7604 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7605 7606 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7607 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7608 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7609 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7610 { 7611 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7612 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) 
); 7613 } 7614 7615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7616 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7617 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 7618 const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7619 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7620 { 7621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7622 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7623 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7624 # endif 7625 7626 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7627 } 7628 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7629 7630 template <typename Dispatch> resetEvent2(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const7631 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, 7632 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 7633 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7634 { 7635 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7636 d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 7637 } 7638 7639 template <typename Dispatch> waitEvents2(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const7640 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, 7641 const VULKAN_HPP_NAMESPACE::Event * pEvents, 7642 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 7643 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7644 { 7645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7646 d.vkCmdWaitEvents2( 7647 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 7648 } 7649 7650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7651 template <typename Dispatch> waitEvents2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const7652 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 7653 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 7654 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7655 { 7656 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7657 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7658 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7659 # endif 7660 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7661 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 7662 # else 7663 if ( events.size() != dependencyInfos.size() ) 7664 { 7665 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); 7666 } 7667 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 7668 7669 d.vkCmdWaitEvents2( m_commandBuffer, 7670 events.size(), 7671 reinterpret_cast<const VkEvent *>( events.data() ), 7672 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 7673 } 7674 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7675 7676 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const7677 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 7678 Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT 7679 { 7680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7681 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 7682 } 7683 7684 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7685 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const7686 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 7687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7688 { 7689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7690 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7691 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7692 # endif 7693 7694 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 7695 } 7696 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7697 7698 template <typename Dispatch> writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const7699 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 7700 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 7701 uint32_t query, 7702 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7703 { 7704 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7705 d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 7706 } 7707 7708 template <typename Dispatch> submit2(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7709 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, 7710 const 
VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 7711 VULKAN_HPP_NAMESPACE::Fence fence, 7712 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7713 { 7714 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7715 return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 7716 } 7717 7718 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7719 template <typename Dispatch> submit2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7720 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( 7721 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 7722 { 7723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7724 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7725 VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 7726 # endif 7727 7728 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7729 d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 7730 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); 7731 7732 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 7733 } 7734 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7735 7736 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const7737 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 7738 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7739 { 7740 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7741 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 7742 } 7743 7744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7745 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const7746 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 7747 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7748 { 7749 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7750 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7751 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7752 # endif 7753 7754 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 7755 } 7756 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7757 7758 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const7759 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7760 { 7761 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7762 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 7763 } 7764 7765 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7766 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const7767 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7768 { 7769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7770 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7771 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function <vkCmdCopyImage2> 
requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7772 # endif 7773 7774 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 7775 } 7776 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7777 7778 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const7779 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 7780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7781 { 7782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7783 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 7784 } 7785 7786 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7787 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const7788 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 7789 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7790 { 7791 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7792 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7793 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7794 # endif 7795 7796 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 7797 } 7798 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7799 7800 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const7801 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 7802 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 7803 { 7804 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7805 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 7806 } 7807 7808 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7809 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const7810 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 7811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7812 { 7813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7815 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7816 # endif 7817 7818 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 7819 } 7820 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7821 7822 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const7823 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7824 { 7825 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7826 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 7827 } 7828 7829 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7830 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const7831 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7832 { 7833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 7834 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7835 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7836 # endif 7837 7838 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 7839 } 7840 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7841 7842 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const7843 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 7844 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7845 { 7846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7847 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 7848 } 7849 7850 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7851 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const7852 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 7853 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7854 { 7855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7856 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7857 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 7858 # endif 7859 7860 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 7861 } 7862 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7863 7864 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const7865 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 7866 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7867 { 7868 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7869 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 7870 } 7871 7872 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7873 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const7874 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 7875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7876 { 7877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7878 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7879 VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 7880 # endif 7881 7882 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 7883 } 7884 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7885 7886 template <typename Dispatch> endRendering(Dispatch const & d) const7887 VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7888 { 7889 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7890 d.vkCmdEndRendering( m_commandBuffer ); 7891 } 7892 7893 template <typename Dispatch> setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const7894 VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7895 { 7896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7897 d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); 7898 } 7899 7900 template <typename Dispatch> setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const7901 VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( 
VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7902 { 7903 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7904 d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); 7905 } 7906 7907 template <typename Dispatch> setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const7908 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 7909 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7910 { 7911 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7912 d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 7913 } 7914 7915 template <typename Dispatch> setViewportWithCount(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const7916 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, 7917 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 7918 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7919 { 7920 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7921 d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 7922 } 7923 7924 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7925 template <typename Dispatch> setViewportWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const7926 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 7927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7928 { 7929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7930 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7931 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && 7932 "Function <vkCmdSetViewportWithCount> requires 
<VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7933 # endif 7934 7935 d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 7936 } 7937 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7938 7939 template <typename Dispatch> 7940 VULKAN_HPP_INLINE void setScissorWithCount(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const7941 CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7942 { 7943 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7944 d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 7945 } 7946 7947 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7948 template <typename Dispatch> setScissorWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const7949 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 7950 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7951 { 7952 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7953 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7954 VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && 7955 "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7956 # endif 7957 7958 d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 7959 } 7960 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7961 7962 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const 
VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const7963 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7964 uint32_t bindingCount, 7965 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 7966 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 7967 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 7968 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 7969 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7970 { 7971 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7972 d.vkCmdBindVertexBuffers2( m_commandBuffer, 7973 firstBinding, 7974 bindingCount, 7975 reinterpret_cast<const VkBuffer *>( pBuffers ), 7976 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 7977 reinterpret_cast<const VkDeviceSize *>( pSizes ), 7978 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 7979 } 7980 7981 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7982 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const7983 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7984 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 7985 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 7986 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 7987 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 7988 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7989 { 7990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7991 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 7992 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && 7993 "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 7994 # endif 7995 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7996 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 7997 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 7998 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 7999 # else 8000 if ( buffers.size() != offsets.size() ) 8001 { 8002 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); 8003 } 8004 if ( !sizes.empty() && buffers.size() != sizes.size() ) 8005 { 8006 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); 8007 } 8008 if ( !strides.empty() && buffers.size() != strides.size() ) 8009 { 8010 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); 8011 } 8012 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 8013 8014 d.vkCmdBindVertexBuffers2( m_commandBuffer, 8015 firstBinding, 8016 buffers.size(), 8017 reinterpret_cast<const VkBuffer *>( buffers.data() ), 8018 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 8019 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 8020 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 8021 } 8022 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8023 8024 template <typename Dispatch> setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const8025 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8026 { 8027 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8028 d.vkCmdSetDepthTestEnable( m_commandBuffer, 
static_cast<VkBool32>( depthTestEnable ) ); 8029 } 8030 8031 template <typename Dispatch> setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const8032 VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8033 { 8034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8035 d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); 8036 } 8037 8038 template <typename Dispatch> setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const8039 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8040 { 8041 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8042 d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); 8043 } 8044 8045 template <typename Dispatch> setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const8046 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 8047 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8048 { 8049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8050 d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); 8051 } 8052 8053 template <typename Dispatch> setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const8054 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8055 { 8056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8057 d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); 8058 } 8059 8060 template <typename Dispatch> 
setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const8061 VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 8062 VULKAN_HPP_NAMESPACE::StencilOp failOp, 8063 VULKAN_HPP_NAMESPACE::StencilOp passOp, 8064 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 8065 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 8066 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8067 { 8068 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8069 d.vkCmdSetStencilOp( m_commandBuffer, 8070 static_cast<VkStencilFaceFlags>( faceMask ), 8071 static_cast<VkStencilOp>( failOp ), 8072 static_cast<VkStencilOp>( passOp ), 8073 static_cast<VkStencilOp>( depthFailOp ), 8074 static_cast<VkCompareOp>( compareOp ) ); 8075 } 8076 8077 template <typename Dispatch> setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const8078 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 8079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8080 { 8081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8082 d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); 8083 } 8084 8085 template <typename Dispatch> setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const8086 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8087 { 8088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8089 d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 8090 } 8091 8092 template <typename Dispatch> 
setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const8093 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 8094 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8095 { 8096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8097 d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 8098 } 8099 8100 template <typename Dispatch> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8101 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 8102 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 8103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8104 { 8105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8106 d.vkGetDeviceBufferMemoryRequirements( 8107 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8108 } 8109 8110 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8111 template <typename Dispatch> 8112 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8113 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8114 { 8115 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8116 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8117 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8118 "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8119 # endif 8120 
8121 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8122 d.vkGetDeviceBufferMemoryRequirements( 8123 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8124 8125 return memoryRequirements; 8126 } 8127 8128 template <typename X, typename Y, typename... Z, typename Dispatch> 8129 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const8130 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8131 { 8132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8133 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8134 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && 8135 "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8136 # endif 8137 8138 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8139 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8140 d.vkGetDeviceBufferMemoryRequirements( 8141 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8142 8143 return structureChain; 8144 } 8145 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8146 8147 template <typename Dispatch> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const8148 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8149 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 
8150 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8151 { 8152 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8153 d.vkGetDeviceImageMemoryRequirements( 8154 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 8155 } 8156 8157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8158 template <typename Dispatch> 8159 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8160 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8161 { 8162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8163 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8164 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8165 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8166 # endif 8167 8168 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 8169 d.vkGetDeviceImageMemoryRequirements( 8170 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8171 8172 return memoryRequirements; 8173 } 8174 8175 template <typename X, typename Y, typename... 
Z, typename Dispatch> 8176 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8177 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8178 { 8179 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8180 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8181 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && 8182 "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8183 # endif 8184 8185 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 8186 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 8187 d.vkGetDeviceImageMemoryRequirements( 8188 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 8189 8190 return structureChain; 8191 } 8192 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8193 8194 template <typename Dispatch> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const8195 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 8196 uint32_t * pSparseMemoryRequirementCount, 8197 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 8198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8199 { 8200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8201 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8202 reinterpret_cast<const 
VkDeviceImageMemoryRequirements *>( pInfo ), 8203 pSparseMemoryRequirementCount, 8204 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 8205 } 8206 8207 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8208 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 8209 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const8210 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 8211 { 8212 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8213 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8214 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8215 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8216 # endif 8217 8218 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 8219 uint32_t sparseMemoryRequirementCount; 8220 d.vkGetDeviceImageSparseMemoryRequirements( 8221 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8222 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8223 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8224 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8225 &sparseMemoryRequirementCount, 8226 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8227 8228 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8229 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8230 { 8231 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 
8232 } 8233 return sparseMemoryRequirements; 8234 } 8235 8236 template <typename SparseImageMemoryRequirements2Allocator, 8237 typename Dispatch, 8238 typename std::enable_if< 8239 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 8240 int>::type> 8241 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const8242 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 8243 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 8244 Dispatch const & d ) const 8245 { 8246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8247 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8248 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && 8249 "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 8250 # endif 8251 8252 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 8253 sparseImageMemoryRequirements2Allocator ); 8254 uint32_t sparseMemoryRequirementCount; 8255 d.vkGetDeviceImageSparseMemoryRequirements( 8256 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 8257 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8258 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 8259 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 8260 &sparseMemoryRequirementCount, 8261 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 8262 
8263 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 8264 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 8265 { 8266 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 8267 } 8268 return sparseMemoryRequirements; 8269 } 8270 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8271 8272 //=== VK_KHR_surface === 8273 8274 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8275 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8276 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8277 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8278 { 8279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8280 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8281 } 8282 8283 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8284 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8285 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8286 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8288 { 8289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8290 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8291 VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); 8292 # endif 8293 8294 d.vkDestroySurfaceKHR( m_instance, 8295 static_cast<VkSurfaceKHR>( surface ), 8296 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8297 } 8298 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8299 8300 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8301 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8302 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8303 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8304 { 8305 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8306 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8307 } 8308 8309 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8310 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8311 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8312 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8314 { 8315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8316 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8317 VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); 8318 # endif 8319 8320 d.vkDestroySurfaceKHR( m_instance, 8321 static_cast<VkSurfaceKHR>( surface ), 8322 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8323 } 8324 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8325 8326 template <typename Dispatch> getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::Bool32 * pSupported,Dispatch const & d) const8327 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, 8328 VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8329 
VULKAN_HPP_NAMESPACE::Bool32 * pSupported, 8330 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8331 { 8332 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8333 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 8334 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); 8335 } 8336 8337 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8338 template <typename Dispatch> 8339 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8340 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8341 { 8342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8343 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8344 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" ); 8345 # endif 8346 8347 VULKAN_HPP_NAMESPACE::Bool32 supported; 8348 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 8349 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) ); 8350 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); 8351 8352 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); 8353 } 8354 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8355 8356 template <typename Dispatch> getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,Dispatch const & d) const8357 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8358 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, 8359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8360 { 8361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8362 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 8363 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); 8364 } 8365 8366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8367 template <typename Dispatch> 8368 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8369 PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8370 { 8371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8372 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8373 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" ); 8374 # endif 8375 8376 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; 8377 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 8378 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) ); 8379 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); 8380 8381 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 8382 } 8383 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8384 8385 template <typename Dispatch> getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormatKHR 
* pSurfaceFormats,Dispatch const & d) const8386 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8387 uint32_t * pSurfaceFormatCount, 8388 VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, 8389 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8390 { 8391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8392 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8393 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); 8394 } 8395 8396 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8397 template <typename SurfaceFormatKHRAllocator, typename Dispatch> 8398 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8399 PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8400 { 8401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8402 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8403 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); 8404 # endif 8405 8406 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats; 8407 uint32_t surfaceFormatCount; 8408 VULKAN_HPP_NAMESPACE::Result result; 8409 do 8410 { 8411 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8412 d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 8413 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 8414 { 8415 surfaceFormats.resize( surfaceFormatCount ); 8416 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8417 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 8418 } 8419 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8420 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 8421 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 8422 if ( surfaceFormatCount < surfaceFormats.size() ) 8423 { 8424 surfaceFormats.resize( surfaceFormatCount ); 8425 } 8426 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 8427 } 8428 8429 template <typename SurfaceFormatKHRAllocator, 8430 typename Dispatch, 8431 typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type> 8432 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,Dispatch const & d) const8433 PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8434 SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, 8435 Dispatch const & d ) const 8436 { 8437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8438 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8439 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); 8440 # endif 8441 8442 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator ); 8443 uint32_t surfaceFormatCount; 8444 VULKAN_HPP_NAMESPACE::Result result; 8445 do 8446 { 8447 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8448 
d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 8449 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 8450 { 8451 surfaceFormats.resize( surfaceFormatCount ); 8452 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 8453 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 8454 } 8455 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8456 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 8457 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 8458 if ( surfaceFormatCount < surfaceFormats.size() ) 8459 { 8460 surfaceFormats.resize( surfaceFormatCount ); 8461 } 8462 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); 8463 } 8464 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8465 8466 template <typename Dispatch> getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const8467 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8468 uint32_t * pPresentModeCount, 8469 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 8470 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8471 { 8472 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8473 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 8474 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 8475 } 8476 8477 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8478 template <typename PresentModeKHRAllocator, typename Dispatch> 8479 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8480 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8481 { 8482 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8483 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8484 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); 8485 # endif 8486 8487 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 8488 uint32_t presentModeCount; 8489 VULKAN_HPP_NAMESPACE::Result result; 8490 do 8491 { 8492 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8493 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 8494 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 8495 { 8496 presentModes.resize( presentModeCount ); 8497 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 8498 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 8499 } 8500 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8501 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 8502 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 8503 if ( presentModeCount < presentModes.size() ) 8504 { 8505 presentModes.resize( presentModeCount ); 8506 } 8507 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 8508 } 8509 8510 template <typename PresentModeKHRAllocator, 8511 typename Dispatch, 
8512 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 8513 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const8514 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8515 PresentModeKHRAllocator & presentModeKHRAllocator, 8516 Dispatch const & d ) const 8517 { 8518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8519 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8520 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); 8521 # endif 8522 8523 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 8524 uint32_t presentModeCount; 8525 VULKAN_HPP_NAMESPACE::Result result; 8526 do 8527 { 8528 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8529 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 8530 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 8531 { 8532 presentModes.resize( presentModeCount ); 8533 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 8534 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 8535 } 8536 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8537 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 8538 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 8539 if ( 
presentModeCount < presentModes.size() ) 8540 { 8541 presentModes.resize( presentModeCount ); 8542 } 8543 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 8544 } 8545 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8546 8547 //=== VK_KHR_swapchain === 8548 8549 template <typename Dispatch> createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,Dispatch const & d) const8550 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, 8551 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8552 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, 8553 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8554 { 8555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8556 return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, 8557 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), 8558 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8559 reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); 8560 } 8561 8562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8563 template <typename Dispatch> 8564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8565 Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 8566 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8567 Dispatch const & d ) const 8568 { 8569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8570 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8571 VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function 
<vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 8572 # endif 8573 8574 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 8575 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8576 d.vkCreateSwapchainKHR( m_device, 8577 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 8578 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8579 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 8580 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); 8581 8582 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); 8583 } 8584 8585 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8586 template <typename Dispatch> 8587 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8588 Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 8589 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8590 Dispatch const & d ) const 8591 { 8592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8593 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8594 VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); 8595 # endif 8596 8597 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 8598 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8599 d.vkCreateSwapchainKHR( m_device, 8600 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 8601 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8602 
reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 8603 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); 8604 8605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 8606 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 8607 } 8608 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8609 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8610 8611 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8612 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8613 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8614 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8615 { 8616 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8617 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8618 } 8619 8620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8621 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8622 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8623 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8624 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8625 { 8626 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8627 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8628 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 8629 # endif 8630 8631 d.vkDestroySwapchainKHR( m_device, 8632 static_cast<VkSwapchainKHR>( swapchain ), 8633 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8634 } 8635 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8636 8637 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8638 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8639 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8641 { 8642 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8643 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8644 } 8645 8646 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8647 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8648 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8649 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8650 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8651 { 8652 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8653 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8654 VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); 8655 # endif 8656 8657 d.vkDestroySwapchainKHR( m_device, 8658 static_cast<VkSwapchainKHR>( swapchain ), 8659 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8660 } 8661 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8662 8663 template <typename Dispatch> getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,Dispatch const & d) const8664 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8665 uint32_t * pSwapchainImageCount, 8666 VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, 8667 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8668 { 8669 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8670 return static_cast<Result>( 8671 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); 8672 } 8673 8674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8675 template <typename ImageAllocator, typename Dispatch> 8676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const8677 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 8678 { 8679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8680 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8681 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 8682 # endif 8683 8684 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages; 8685 uint32_t swapchainImageCount; 8686 VULKAN_HPP_NAMESPACE::Result result; 8687 do 8688 { 8689 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8690 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 8691 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 8692 { 8693 swapchainImages.resize( swapchainImageCount ); 8694 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 8695 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 8696 } 8697 } while ( 
result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8698 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 8699 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 8700 if ( swapchainImageCount < swapchainImages.size() ) 8701 { 8702 swapchainImages.resize( swapchainImageCount ); 8703 } 8704 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 8705 } 8706 8707 template <typename ImageAllocator, 8708 typename Dispatch, 8709 typename std::enable_if<std::is_same<typename ImageAllocator::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type> 8710 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,ImageAllocator & imageAllocator,Dispatch const & d) const8711 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const 8712 { 8713 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8714 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8715 VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); 8716 # endif 8717 8718 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator ); 8719 uint32_t swapchainImageCount; 8720 VULKAN_HPP_NAMESPACE::Result result; 8721 do 8722 { 8723 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8724 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 8725 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 8726 { 8727 swapchainImages.resize( swapchainImageCount ); 8728 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 8729 m_device, static_cast<VkSwapchainKHR>( 
swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 8730 } 8731 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8732 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 8733 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 8734 if ( swapchainImageCount < swapchainImages.size() ) 8735 { 8736 swapchainImages.resize( swapchainImageCount ); 8737 } 8738 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); 8739 } 8740 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8741 8742 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,uint32_t * pImageIndex,Dispatch const & d) const8743 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8744 uint64_t timeout, 8745 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 8746 VULKAN_HPP_NAMESPACE::Fence fence, 8747 uint32_t * pImageIndex, 8748 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8749 { 8750 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8751 return static_cast<Result>( d.vkAcquireNextImageKHR( 8752 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) ); 8753 } 8754 8755 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8756 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const8757 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 8758 uint64_t timeout, 8759 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 8760 
VULKAN_HPP_NAMESPACE::Fence fence, 8761 Dispatch const & d ) const 8762 { 8763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8764 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8765 VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" ); 8766 # endif 8767 8768 uint32_t imageIndex; 8769 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireNextImageKHR( 8770 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) ); 8771 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 8772 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", 8773 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 8774 VULKAN_HPP_NAMESPACE::Result::eTimeout, 8775 VULKAN_HPP_NAMESPACE::Result::eNotReady, 8776 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 8777 8778 return ResultValue<uint32_t>( result, std::move( imageIndex ) ); 8779 } 8780 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8781 8782 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,Dispatch const & d) const8783 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, 8784 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8785 { 8786 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8787 return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); 8788 } 8789 8790 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8791 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,Dispatch const & d) const8792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, 8793 Dispatch const & d ) const 8794 { 8795 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8796 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8797 VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" ); 8798 # endif 8799 8800 VULKAN_HPP_NAMESPACE::Result result = 8801 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) ); 8802 VULKAN_HPP_NAMESPACE::detail::resultCheck( 8803 result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 8804 8805 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 8806 } 8807 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8808 8809 template <typename Dispatch> getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,Dispatch const & d) const8810 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( 8811 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8812 { 8813 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8814 return static_cast<Result>( 8815 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); 8816 } 8817 8818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8819 template <typename Dispatch> 8820 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const & d) const8821 Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const 8822 { 8823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8824 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8825 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && 8826 "Function 
<vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 8827 # endif 8828 8829 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; 8830 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8831 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) ); 8832 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); 8833 8834 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); 8835 } 8836 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8837 8838 template <typename Dispatch> getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const8839 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8840 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 8841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8842 { 8843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8844 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 8845 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 8846 } 8847 8848 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8849 template <typename Dispatch> 8850 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8851 Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8852 { 8853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
8854 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8855 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && 8856 "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 8857 # endif 8858 8859 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 8860 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 8861 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 8862 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); 8863 8864 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); 8865 } 8866 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8867 8868 template <typename Dispatch> getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const8869 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 8870 uint32_t * pRectCount, 8871 VULKAN_HPP_NAMESPACE::Rect2D * pRects, 8872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8873 { 8874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8875 return static_cast<Result>( 8876 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); 8877 } 8878 8879 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8880 template <typename Rect2DAllocator, typename Dispatch> 8881 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8882 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR 
surface, Dispatch const & d ) const 8883 { 8884 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8885 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8886 VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && 8887 "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 8888 # endif 8889 8890 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects; 8891 uint32_t rectCount; 8892 VULKAN_HPP_NAMESPACE::Result result; 8893 do 8894 { 8895 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8896 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 8897 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 8898 { 8899 rects.resize( rectCount ); 8900 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 8901 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 8902 } 8903 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8904 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 8905 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 8906 if ( rectCount < rects.size() ) 8907 { 8908 rects.resize( rectCount ); 8909 } 8910 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); 8911 } 8912 8913 template <typename Rect2DAllocator, 8914 typename Dispatch, 8915 typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type> 8916 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Rect2DAllocator & rect2DAllocator,Dispatch const & d) const8917 PhysicalDevice::getPresentRectanglesKHR( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const 8918 { 8919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8920 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8921 VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && 8922 "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 8923 # endif 8924 8925 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator ); 8926 uint32_t rectCount; 8927 VULKAN_HPP_NAMESPACE::Result result; 8928 do 8929 { 8930 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8931 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 8932 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 8933 { 8934 rects.resize( rectCount ); 8935 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 8936 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 8937 } 8938 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8939 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 8940 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 8941 if ( rectCount < rects.size() ) 8942 { 8943 rects.resize( rectCount ); 8944 } 8945 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); 8946 } 8947 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8948 8949 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex,Dispatch const & d) const8950 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, 8951 uint32_t * pImageIndex, 8952 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 8953 { 8954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8955 return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); 8956 } 8957 8958 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8959 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,Dispatch const & d) const8960 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, 8961 Dispatch const & d ) const 8962 { 8963 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8964 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 8965 VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); 8966 # endif 8967 8968 uint32_t imageIndex; 8969 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8970 d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) ); 8971 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 8972 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", 8973 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 8974 VULKAN_HPP_NAMESPACE::Result::eTimeout, 8975 VULKAN_HPP_NAMESPACE::Result::eNotReady, 8976 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 8977 8978 return ResultValue<uint32_t>( result, std::move( imageIndex ) ); 8979 } 8980 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8981 8982 //=== VK_KHR_display === 8983 8984 template <typename Dispatch> getDisplayPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,Dispatch const & d) const8985 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, 8986 
VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, 8987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8988 { 8989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8990 return static_cast<Result>( 8991 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); 8992 } 8993 8994 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8995 template <typename DisplayPropertiesKHRAllocator, typename Dispatch> 8996 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(Dispatch const & d) const8997 PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const 8998 { 8999 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9000 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9001 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); 9002 # endif 9003 9004 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; 9005 uint32_t propertyCount; 9006 VULKAN_HPP_NAMESPACE::Result result; 9007 do 9008 { 9009 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9010 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9011 { 9012 properties.resize( propertyCount ); 9013 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9014 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 9015 } 9016 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9017 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 9018 VULKAN_HPP_ASSERT( 
propertyCount <= properties.size() ); 9019 if ( propertyCount < properties.size() ) 9020 { 9021 properties.resize( propertyCount ); 9022 } 9023 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9024 } 9025 9026 template < 9027 typename DisplayPropertiesKHRAllocator, 9028 typename Dispatch, 9029 typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type> 9030 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,Dispatch const & d) const9031 PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const 9032 { 9033 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9034 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9035 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); 9036 # endif 9037 9038 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); 9039 uint32_t propertyCount; 9040 VULKAN_HPP_NAMESPACE::Result result; 9041 do 9042 { 9043 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9044 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9045 { 9046 properties.resize( propertyCount ); 9047 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9048 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 9049 } 9050 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9051 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 9052 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9053 if ( propertyCount < properties.size() ) 9054 { 9055 properties.resize( propertyCount ); 9056 } 9057 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9058 } 9059 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9060 9061 template <typename Dispatch> getDisplayPlanePropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,Dispatch const & d) const9062 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, 9063 VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, 9064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9065 { 9066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9067 return static_cast<Result>( 9068 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); 9069 } 9070 9071 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9072 template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch> 9073 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9074 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(Dispatch const & d) const9075 PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const 9076 { 9077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9078 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9079 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); 9080 # endif 9081 9082 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> 
properties; 9083 uint32_t propertyCount; 9084 VULKAN_HPP_NAMESPACE::Result result; 9085 do 9086 { 9087 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9088 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9089 { 9090 properties.resize( propertyCount ); 9091 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 9092 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 9093 } 9094 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9095 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 9096 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9097 if ( propertyCount < properties.size() ) 9098 { 9099 properties.resize( propertyCount ); 9100 } 9101 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9102 } 9103 9104 template < 9105 typename DisplayPlanePropertiesKHRAllocator, 9106 typename Dispatch, 9107 typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, 9108 int>::type> 9109 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9110 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,Dispatch const & d) const9111 PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const 9112 { 9113 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9114 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9115 VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); 9116 # endif 9117 9118 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator ); 9119 uint32_t propertyCount; 9120 VULKAN_HPP_NAMESPACE::Result result; 9121 do 9122 { 9123 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 9124 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9125 { 9126 properties.resize( propertyCount ); 9127 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 9128 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 9129 } 9130 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9131 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 9132 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9133 if ( propertyCount < properties.size() ) 9134 { 9135 properties.resize( propertyCount ); 9136 } 9137 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9138 } 9139 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9140 9141 template <typename Dispatch> getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,uint32_t * pDisplayCount,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,Dispatch const & d) const9142 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, 9143 uint32_t * pDisplayCount, 9144 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, 9145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9146 { 9147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9148 return static_cast<Result>( 9149 
d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); 9150 } 9151 9152 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9153 template <typename DisplayKHRAllocator, typename Dispatch> 9154 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,Dispatch const & d) const9155 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const 9156 { 9157 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9158 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9159 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); 9160 # endif 9161 9162 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays; 9163 uint32_t displayCount; 9164 VULKAN_HPP_NAMESPACE::Result result; 9165 do 9166 { 9167 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 9168 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 9169 { 9170 displays.resize( displayCount ); 9171 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9172 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 9173 } 9174 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9175 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 9176 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 9177 if ( displayCount < displays.size() ) 9178 { 9179 displays.resize( displayCount ); 9180 } 9181 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
displays ) ); 9182 } 9183 9184 template <typename DisplayKHRAllocator, 9185 typename Dispatch, 9186 typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type> 9187 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,DisplayKHRAllocator & displayKHRAllocator,Dispatch const & d) const9188 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const 9189 { 9190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9191 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9192 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); 9193 # endif 9194 9195 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); 9196 uint32_t displayCount; 9197 VULKAN_HPP_NAMESPACE::Result result; 9198 do 9199 { 9200 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 9201 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 9202 { 9203 displays.resize( displayCount ); 9204 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9205 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 9206 } 9207 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9208 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 9209 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 9210 if ( displayCount < displays.size() ) 9211 { 9212 displays.resize( displayCount ); 9213 } 
9214 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); 9215 } 9216 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9217 9218 template <typename Dispatch> getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,Dispatch const & d) const9219 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9220 uint32_t * pPropertyCount, 9221 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, 9222 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9223 { 9224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9225 return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( 9226 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); 9227 } 9228 9229 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9230 template <typename DisplayModePropertiesKHRAllocator, typename Dispatch> 9231 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9232 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const9233 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 9234 { 9235 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9236 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9237 VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); 9238 # endif 9239 9240 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; 9241 uint32_t propertyCount; 9242 VULKAN_HPP_NAMESPACE::Result result; 9243 do 9244 { 9245 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 9246 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 9247 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9248 { 9249 properties.resize( propertyCount ); 9250 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 9251 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 9252 } 9253 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9254 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 9255 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9256 if ( propertyCount < properties.size() ) 9257 { 9258 properties.resize( propertyCount ); 9259 } 9260 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9261 } 9262 9263 template <typename DisplayModePropertiesKHRAllocator, 9264 typename Dispatch, 9265 typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, 9266 int>::type> 9267 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9268 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,Dispatch const & d) const9269 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9270 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, 9271 Dispatch const & d ) const 9272 { 9273 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9274 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9275 VULKAN_HPP_ASSERT( 
d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); 9276 # endif 9277 9278 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator ); 9279 uint32_t propertyCount; 9280 VULKAN_HPP_NAMESPACE::Result result; 9281 do 9282 { 9283 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9284 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 9285 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 9286 { 9287 properties.resize( propertyCount ); 9288 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 9289 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 9290 } 9291 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9292 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 9293 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9294 if ( propertyCount < properties.size() ) 9295 { 9296 properties.resize( propertyCount ); 9297 } 9298 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 9299 } 9300 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9301 9302 template <typename Dispatch> createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,Dispatch const & d) const9303 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9304 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, 9305 const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
* pAllocator, 9306 VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, 9307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9308 { 9309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9310 return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, 9311 static_cast<VkDisplayKHR>( display ), 9312 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), 9313 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9314 reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); 9315 } 9316 9317 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9318 template <typename Dispatch> 9319 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9320 PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 9321 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, 9322 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9323 Dispatch const & d ) const 9324 { 9325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9326 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 9327 VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" ); 9328 # endif 9329 9330 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 9331 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9332 d.vkCreateDisplayModeKHR( m_physicalDevice, 9333 static_cast<VkDisplayKHR>( display ), 9334 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 9335 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9336 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 9337 
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode variant of createDisplayModeKHR: creates the display mode and wraps the
  // resulting DisplayModeKHR in a UniqueHandle whose deleter is an ObjectDestroy bound to
  // this PhysicalDevice, the (optional) allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
    PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR                          display,
                                                const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR &    createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" );
#  endif

    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result,
      UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes the plane capabilities through pCapabilities and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR                mode,
                                                    uint32_t                                            planeIndex,
                                                    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
                                                    Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
      m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the capabilities by value; the Result is routed through
  // detail::resultCheck / detail::createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" );
# endif

    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
    VULKAN_HPP_NAMESPACE::Result                      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
      m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes the created surface through pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
                                                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                        VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
                                                                  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: the created SurfaceKHR is owned by a UniqueHandle with an
  // ObjectDestroy deleter bound to this Instance.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_display_swapchain ===

  // C-style overload: creates swapchainCount shared swapchains in one call, writing the
  // handles through pSwapchains; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t                                             swapchainCount,
                                                                                   const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchains,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
                                                               swapchainCount,
                                                               reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
                                                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                               reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: one swapchain per element of createInfos, returned as a vector.
  template <typename SwapchainKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                       Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                           result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) );
  }

  // Same as above, but the result vector is constructed with a caller-supplied allocator
  // (enable_if restricts this overload to allocators whose value_type is SwapchainKHR).
  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                       SwapchainKHRAllocator &                                                                      swapchainKHRAllocator,
                                       Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
    VULKAN_HPP_NAMESPACE::Result                                           result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) );
  }

  // Single-swapchain convenience: calls vkCreateSharedSwapchainsKHR with a count of 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
# endif

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: creates the swapchains into a plain vector, then wraps each handle
  // in a UniqueHandle sharing one ObjectDestroy deleter bound to this Device.
  template <typename Dispatch, typename SwapchainKHRAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                             Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
    uniqueSwapchains.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) );
  }

  // Same as above with a caller-supplied allocator for the vector of UniqueHandles
  // (enable_if restricts this overload to allocators of UniqueHandle<SwapchainKHR, Dispatch>).
  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                    allocator,
                                             SwapchainKHRAllocator &                                                                      swapchainKHRAllocator,
                                             Dispatch const &                                                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
    uniqueSwapchains.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) );
  }

  // Single-swapchain smart-handle convenience: count of 1, result wrapped in a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
#  endif

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  //=== VK_KHR_xlib_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                     pSurface,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
                                                          reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR &    createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: the created SurfaceKHR is owned by a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR &    createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: takes the X11 Display by pointer; returns the raw Bool32.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32
    PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the X11 Display by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR &&
                       "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" );
#  endif

    VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#if defined( VK_USE_PLATFORM_XCB_KHR )
  //=== VK_KHR_xcb_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
                                                         reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR &     createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: the created SurfaceKHR is owned by a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR &     createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: takes the xcb connection by pointer; returns the raw Bool32.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                         xcb_connection_t * connection,
                                                                         xcb_visualid_t     visual_id,
                                                                         Dispatch const &   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the xcb connection by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                                               xcb_connection_t & connection,
                                                                                               xcb_visualid_t     visual_id,
                                                                                               Dispatch const &   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR &&
                       "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" );
#  endif

    VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  //=== VK_KHR_wayland_surface ===

  // C-style overload: writes the created surface through pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: the created SurfaceKHR is owned by a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: takes the wl_display by pointer; returns the raw Bool32.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t            queueFamilyIndex,
                                                                             struct wl_display * display,
                                                                             Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the wl_display by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
                       "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" );
#  endif

    VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );

    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_KHR_android_surface ===

  // C-style overload: creates a SurfaceKHR from *pCreateInfo, writing the handle to *pSurface.
  // Returns the raw Result unchanged; no exception translation is performed.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference createInfo, Optional allocator. Failures are routed through
  // detail::resultCheck (which raises in the default, exception-enabled configuration) and the
  // created surface is returned by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: identical to the enhanced overload except the surface is returned as a
  // UniqueHandle that destroys it through this Instance with the same allocator/dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif   /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_win32_surface ===

  // C-style overload: creates a SurfaceKHR for a Win32 window; raw Result returned unchanged.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                                                                 Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
                                                           reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference createInfo / Optional allocator; completed in the next chunk.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR &   createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT(
      d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
#  endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: returns the surface as a UniqueHandle destroyed through this Instance.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR &   createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#   if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
#   endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Queries whether the given queue family can present to the Win32 desktop; unlike the other
  // platforms this query takes no window/display argument.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_debug_report ===

  // C-style overload: registers a debug-report callback on this instance; raw Result returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT *                 pCallback,
                                            Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
                                                                  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: result is checked via detail::resultCheck, handle returned by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
    Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                            Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
# endif

    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
      m_instance,
      reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: callback handle wrapped in a UniqueHandle destroyed through this Instance.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
    Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                                  Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
#  endif

    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
      m_instance,
      reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif  /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  // (signature's `template <typename Dispatch>` line precedes this chunk)
  // C-style overload: destroys a previously created debug-report callback.
  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugReportCallbackEXT(
      m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same destruction, Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT              callback,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
# endif

    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Type-dispatched destroy() overload used by UniqueHandle/ObjectDestroy; forwards to
  // vkDestroyDebugReportCallbackEXT exactly like the named overload above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugReportCallbackEXT(
      m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy() overload with Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT              callback,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
# endif

    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: injects a message into the debug-report stream. Note the trailing
  // underscore on objectType_ (generator convention to avoid shadowing).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                          uint64_t                                       object,
                                                          size_t                                         location,
                                                          int32_t                                        messageCode,
                                                          const char *                                   pLayerPrefix,
                                                          const char *                                   pMessage,
                                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                               object,
                               location,
                               messageCode,
                               pLayerPrefix,
                               pMessage );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: std::string prefix/message; forwards their c_str() pointers, which stay
  // valid for the duration of the call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                          uint64_t                                       object,
                                                          size_t                                         location,
                                                          int32_t                                        messageCode,
                                                          const std::string &                            layerPrefix,
                                                          const std::string &                            message,
                                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" );
# endif

    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                               object,
                               location,
                               messageCode,
                               layerPrefix.c_str(),
                               message.c_str() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_debug_marker ===

  template <typename Dispatch>
  // (signature's `template <typename Dispatch>` line precedes this chunk)
  // C-style overload: attaches an opaque tag blob to a Vulkan object for debugging tools.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
                                                                                    Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: void result type — only the success/failure check matters.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: gives a Vulkan object a human-readable debug name.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
                                         Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload mirroring debugMarkerSetObjectTagEXT above.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Opens a debug-marker region in this command buffer (C-style overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                                             Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference-based overload of debugMarkerBeginEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
                                                             Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" );
# endif

    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Closes the innermost open debug-marker region.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
  }

  // Inserts a single (non-region) debug marker into the command buffer (C-style overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                                              Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference-based overload of debugMarkerInsertEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
                                                              Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" );
# endif

    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_video_queue ===

  // C-style overload: queries video-coding capabilities for the given profile.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
                                                                                         VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR *      pCapabilities,
                                                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload; body continues in the next chunk.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
    PhysicalDevice::getVideoCapabilitiesKHR( const
                                             VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
# endif

    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
    VULKAN_HPP_NAMESPACE::Result               result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) );
  }

  // StructureChain variant: lets the caller attach pNext extension structures; the
  // VideoCapabilitiesKHR member of the chain is filled in place and the whole chain returned.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
# endif

    StructureChain<X, Y, Z...>                   structureChain;
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
    VULKAN_HPP_NAMESPACE::Result                 result       = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
      m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload of the two-call enumeration entry point: with a null pVideoFormatProperties
  // the driver writes the count; with a non-null array it fills up to *pVideoFormatPropertyCount.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
                                                 uint32_t *                                                     pVideoFormatPropertyCount,
                                                 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR *               pVideoFormatProperties,
                                                 Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
                                                                               reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
                                                                               pVideoFormatPropertyCount,
                                                                               reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: standard Vulkan two-call enumeration — query the count, resize the vector,
  // fetch the data, and retry while the driver reports eIncomplete (count may change between calls).
  // A final shrink keeps vector.size() equal to the count the driver actually wrote.
  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
    PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
                       "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
    uint32_t                                                                                       videoFormatPropertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                   result;
    do
    {
      // First call: count only (null data pointer).
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
      {
        // Second call: fetch the actual properties.
        videoFormatProperties.resize( videoFormatPropertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
                                                         &videoFormatPropertyCount,
                                                         reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
    VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
    if ( videoFormatPropertyCount < videoFormatProperties.size() )
    {
      videoFormatProperties.resize( videoFormatPropertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) );
  }

  // Allocator-taking variant: identical enumeration loop, but the result vector is constructed
  // with the caller-supplied allocator. The enable_if constrains the allocator's value_type.
  template <typename VideoFormatPropertiesKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
    PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
                                                 VideoFormatPropertiesKHRAllocator &                            videoFormatPropertiesKHRAllocator,
                                                 Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
                       "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
    uint32_t                                                                                       videoFormatPropertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                   result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
      {
        videoFormatProperties.resize( videoFormatPropertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
                                                         &videoFormatPropertyCount,
                                                         reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
    VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
    if ( videoFormatPropertyCount < videoFormatProperties.size() )
    {
      videoFormatProperties.resize( videoFormatPropertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,Dispatch const & d) const10480 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, 10481 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10482 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, 10483 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10484 { 10485 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10486 return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, 10487 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), 10488 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10489 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); 10490 } 10491 10492 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10493 template <typename Dispatch> 10494 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10495 Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 10496 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10497 Dispatch const & d ) const 10498 { 10499 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10500 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10501 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 10502 # endif 10503 10504 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 10505 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10506 d.vkCreateVideoSessionKHR( m_device, 10507 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 
10508 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10509 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 10510 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); 10511 10512 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); 10513 } 10514 10515 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10516 template <typename Dispatch> 10517 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10518 Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 10519 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10520 Dispatch const & d ) const 10521 { 10522 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10523 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10524 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); 10525 # endif 10526 10527 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 10528 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10529 d.vkCreateVideoSessionKHR( m_device, 10530 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 10531 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10532 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 10533 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); 10534 10535 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( 10536 result, UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 10537 } 10538 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10539 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10540 10541 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10542 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10543 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10544 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10545 { 10546 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10547 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10548 } 10549 10550 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10551 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10552 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10553 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10554 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10555 { 10556 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10557 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10558 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 10559 # endif 10560 10561 d.vkDestroyVideoSessionKHR( 10562 m_device, 10563 static_cast<VkVideoSessionKHR>( videoSession ), 10564 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( 
allocator ) ) ); 10565 } 10566 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10567 10568 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10569 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10570 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10572 { 10573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10574 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10575 } 10576 10577 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10578 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10579 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10580 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10581 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10582 { 10583 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10584 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10585 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); 10586 # endif 10587 10588 d.vkDestroyVideoSessionKHR( 10589 m_device, 10590 static_cast<VkVideoSessionKHR>( videoSession ), 10591 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10592 } 10593 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10594 10595 template <typename Dispatch> 10596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t * 
pMemoryRequirementsCount,VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,Dispatch const & d) const10597 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10598 uint32_t * pMemoryRequirementsCount, 10599 VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, 10600 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10601 { 10602 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10603 return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 10604 static_cast<VkVideoSessionKHR>( videoSession ), 10605 pMemoryRequirementsCount, 10606 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); 10607 } 10608 10609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10610 template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch> 10611 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10612 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Dispatch const & d) const10613 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const 10614 { 10615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10617 VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 10618 # endif 10619 10620 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; 10621 uint32_t memoryRequirementsCount; 10622 VULKAN_HPP_NAMESPACE::Result result; 10623 do 10624 { 10625 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10626 d.vkGetVideoSessionMemoryRequirementsKHR( 
m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 10627 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 10628 { 10629 memoryRequirements.resize( memoryRequirementsCount ); 10630 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10631 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 10632 static_cast<VkVideoSessionKHR>( videoSession ), 10633 &memoryRequirementsCount, 10634 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 10635 } 10636 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 10637 10638 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 10639 if ( memoryRequirementsCount < memoryRequirements.size() ) 10640 { 10641 memoryRequirements.resize( memoryRequirementsCount ); 10642 } 10643 return memoryRequirements; 10644 } 10645 10646 template <typename VideoSessionMemoryRequirementsKHRAllocator, 10647 typename Dispatch, 10648 typename std::enable_if< 10649 std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, 10650 int>::type> 10651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10652 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,Dispatch const & d) const10653 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10654 VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, 10655 Dispatch const & d ) const 10656 { 10657 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10659 VULKAN_HPP_ASSERT( 
d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); 10660 # endif 10661 10662 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( 10663 videoSessionMemoryRequirementsKHRAllocator ); 10664 uint32_t memoryRequirementsCount; 10665 VULKAN_HPP_NAMESPACE::Result result; 10666 do 10667 { 10668 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10669 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 10670 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 10671 { 10672 memoryRequirements.resize( memoryRequirementsCount ); 10673 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10674 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 10675 static_cast<VkVideoSessionKHR>( videoSession ), 10676 &memoryRequirementsCount, 10677 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 10678 } 10679 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 10680 10681 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 10682 if ( memoryRequirementsCount < memoryRequirements.size() ) 10683 { 10684 memoryRequirements.resize( memoryRequirementsCount ); 10685 } 10686 return memoryRequirements; 10687 } 10688 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10689 10690 template <typename Dispatch> 10691 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t bindSessionMemoryInfoCount,const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,Dispatch const & d) const10692 Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10693 uint32_t bindSessionMemoryInfoCount, 10694 const 
VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, 10695 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10696 { 10697 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10698 return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, 10699 static_cast<VkVideoSessionKHR>( videoSession ), 10700 bindSessionMemoryInfoCount, 10701 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); 10702 } 10703 10704 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10705 template <typename Dispatch> bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,Dispatch const & d) const10706 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( 10707 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 10708 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, 10709 Dispatch const & d ) const 10710 { 10711 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10712 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10713 VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" ); 10714 # endif 10715 10716 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10717 d.vkBindVideoSessionMemoryKHR( m_device, 10718 static_cast<VkVideoSessionKHR>( videoSession ), 10719 bindSessionMemoryInfos.size(), 10720 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) ); 10721 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); 10722 10723 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 10724 } 10725 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10726 10727 template <typename Dispatch> 10728 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,Dispatch const & d) const10729 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, 10730 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10731 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, 10732 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10733 { 10734 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10735 return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, 10736 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), 10737 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10738 reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); 10739 } 10740 10741 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10742 template <typename Dispatch> 10743 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10744 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 10745 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10746 Dispatch const & d ) const 10747 { 10748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10749 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10750 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function 
<vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 10751 # endif 10752 10753 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 10754 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 10755 m_device, 10756 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 10757 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10758 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 10759 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); 10760 10761 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); 10762 } 10763 10764 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10765 template <typename Dispatch> 10766 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10767 Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 10768 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10769 Dispatch const & d ) const 10770 { 10771 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10772 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10773 VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 10774 # endif 10775 10776 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 10777 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 10778 m_device, 10779 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 10780 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10781 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 10782 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); 10783 10784 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 10785 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( 10786 videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 10787 } 10788 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10789 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10790 10791 template <typename Dispatch> 10792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,Dispatch const & d) const10793 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10794 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, 10795 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10796 { 10797 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10798 return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, 10799 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 10800 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); 10801 } 10802 10803 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10804 template <typename Dispatch> 10805 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,Dispatch const & d) const10806 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10807 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, 10808 Dispatch const & d ) const 10809 { 10810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10811 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10812 VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 10813 # endif 10814 10815 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10816 d.vkUpdateVideoSessionParametersKHR( m_device, 10817 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 10818 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) ); 10819 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); 10820 10821 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 10822 } 10823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10824 10825 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10826 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10827 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10828 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10829 { 10830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10831 d.vkDestroyVideoSessionParametersKHR( 10832 m_device, static_cast<VkVideoSessionParametersKHR>( 
videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10833 } 10834 10835 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10836 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10837 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10838 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10839 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10840 { 10841 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10842 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10843 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 10844 # endif 10845 10846 d.vkDestroyVideoSessionParametersKHR( 10847 m_device, 10848 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 10849 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10850 } 10851 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10852 10853 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10854 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10855 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10856 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10857 { 10858 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10859 d.vkDestroyVideoSessionParametersKHR( 10860 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator 
) ); 10861 } 10862 10863 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10864 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10865 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 10866 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10867 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10868 { 10869 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10870 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10871 VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); 10872 # endif 10873 10874 d.vkDestroyVideoSessionParametersKHR( 10875 m_device, 10876 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 10877 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10878 } 10879 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10880 10881 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,Dispatch const & d) const10882 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, 10883 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10884 { 10885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10886 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); 10887 } 10888 10889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10890 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,Dispatch const & d) const10891 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const 
VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, 10892 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10893 { 10894 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10895 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10896 VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" ); 10897 # endif 10898 10899 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) ); 10900 } 10901 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10902 10903 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,Dispatch const & d) const10904 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, 10905 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10906 { 10907 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10908 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); 10909 } 10910 10911 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10912 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,Dispatch const & d) const10913 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, 10914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10915 { 10916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10917 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10918 VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" ); 10919 # endif 10920 10921 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); 10922 } 10923 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10924 10925 template <typename Dispatch> 
controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,Dispatch const & d) const10926 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, 10927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10928 { 10929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10930 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); 10931 } 10932 10933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10934 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,Dispatch const & d) const10935 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, 10936 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10937 { 10938 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10939 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10940 VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" ); 10941 # endif 10942 10943 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); 10944 } 10945 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10946 10947 //=== VK_KHR_video_decode_queue === 10948 10949 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,Dispatch const & d) const10950 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, 10951 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10952 { 10953 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10954 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); 10955 } 10956 10957 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10958 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,Dispatch const & d) const10959 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, 10960 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10961 { 10962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10963 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 10964 VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" ); 10965 # endif 10966 10967 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); 10968 } 10969 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10970 10971 //=== VK_EXT_transform_feedback === 10972 10973 template <typename Dispatch> bindTransformFeedbackBuffersEXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,Dispatch const & d) const10974 VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 10975 uint32_t bindingCount, 10976 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 10977 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 10978 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 10979 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10980 { 10981 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10982 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 10983 firstBinding, 10984 bindingCount, 10985 reinterpret_cast<const VkBuffer *>( pBuffers ), 10986 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 10987 reinterpret_cast<const VkDeviceSize *>( pSizes ) ); 10988 } 10989 10990 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10991 template <typename Dispatch> 10992 VULKAN_HPP_INLINE void bindTransformFeedbackBuffersEXT(uint32_t 
firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,Dispatch const & d) const10993 CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 10994 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 10995 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 10996 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 10997 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 10998 { 10999 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11000 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11001 VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" ); 11002 # endif 11003 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11004 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 11005 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 11006 # else 11007 if ( buffers.size() != offsets.size() ) 11008 { 11009 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); 11010 } 11011 if ( !sizes.empty() && buffers.size() != sizes.size() ) 11012 { 11013 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); 11014 } 11015 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11016 11017 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 11018 firstBinding, 11019 buffers.size(), 11020 reinterpret_cast<const VkBuffer *>( buffers.data() ), 11021 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 11022 reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); 11023 } 11024 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11025 11026 template <typename Dispatch> beginTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11027 VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11028 uint32_t counterBufferCount, 11029 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11030 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11031 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11032 { 11033 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11034 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 11035 firstCounterBuffer, 11036 counterBufferCount, 11037 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11038 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 11039 } 11040 11041 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11042 template <typename Dispatch> 11043 VULKAN_HPP_INLINE void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11044 CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 11045 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11046 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11047 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11048 { 11049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11050 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11051 VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11052 # endif 
11053 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11054 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 11055 # else 11056 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11057 { 11058 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11059 } 11060 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11061 11062 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 11063 firstCounterBuffer, 11064 counterBuffers.size(), 11065 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11066 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11067 } 11068 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11069 11070 template <typename Dispatch> endTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const11071 VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11072 uint32_t counterBufferCount, 11073 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 11074 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 11075 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11076 { 11077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11078 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 11079 firstCounterBuffer, 11080 counterBufferCount, 11081 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 11082 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 11083 } 11084 11085 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11086 template <typename Dispatch> 11087 VULKAN_HPP_INLINE void endTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & 
counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const11088 CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 11089 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 11090 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 11091 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11092 { 11093 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11094 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11095 VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); 11096 # endif 11097 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11098 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 11099 # else 11100 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 11101 { 11102 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 11103 } 11104 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11105 11106 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 11107 firstCounterBuffer, 11108 counterBuffers.size(), 11109 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 11110 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 11111 } 11112 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11113 11114 template <typename Dispatch> beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,uint32_t index,Dispatch const & d) const11115 VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 11116 uint32_t query, 11117 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 11118 uint32_t index, 
11119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11120 { 11121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11122 d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); 11123 } 11124 11125 template <typename Dispatch> 11126 VULKAN_HPP_INLINE void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,uint32_t index,Dispatch const & d) const11127 CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11128 { 11129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11130 d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index ); 11131 } 11132 11133 template <typename Dispatch> drawIndirectByteCountEXT(uint32_t instanceCount,uint32_t firstInstance,VULKAN_HPP_NAMESPACE::Buffer counterBuffer,VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,uint32_t counterOffset,uint32_t vertexStride,Dispatch const & d) const11134 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, 11135 uint32_t firstInstance, 11136 VULKAN_HPP_NAMESPACE::Buffer counterBuffer, 11137 VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, 11138 uint32_t counterOffset, 11139 uint32_t vertexStride, 11140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11141 { 11142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11143 d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, 11144 instanceCount, 11145 firstInstance, 11146 static_cast<VkBuffer>( counterBuffer ), 11147 static_cast<VkDeviceSize>( counterBufferOffset ), 11148 counterOffset, 11149 vertexStride ); 11150 } 11151 11152 //=== VK_NVX_binary_import === 11153 11154 template <typename Dispatch> createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,Dispatch const & d) const11155 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, 11156 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11157 VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, 11158 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11159 { 11160 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11161 return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, 11162 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), 11163 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11164 reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); 11165 } 11166 11167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11168 template <typename Dispatch> 11169 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11170 Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 11171 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11172 Dispatch const & d ) const 11173 { 11174 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11175 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11176 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 11177 # endif 11178 11179 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 11180 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11181 d.vkCreateCuModuleNVX( m_device, 11182 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 11183 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11184 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 11185 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); 11186 11187 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); 11188 } 11189 11190 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11191 template <typename Dispatch> 11192 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11193 Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 11194 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11195 Dispatch const & d ) const 11196 { 11197 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11198 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11199 VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); 11200 # endif 11201 11202 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 11203 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11204 d.vkCreateCuModuleNVX( m_device, 11205 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 11206 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11207 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 11208 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); 11209 11210 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11211 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) 
); 11212 } 11213 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11215 11216 template <typename Dispatch> createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,Dispatch const & d) const11217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, 11218 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11219 VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, 11220 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11221 { 11222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11223 return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, 11224 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), 11225 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11226 reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); 11227 } 11228 11229 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11230 template <typename Dispatch> 11231 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11232 Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 11233 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11234 Dispatch const & d ) const 11235 { 11236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11237 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11238 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11239 # endif 11240 11241 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 11242 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11243 d.vkCreateCuFunctionNVX( m_device, 11244 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 11245 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11246 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 11247 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); 11248 11249 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); 11250 } 11251 11252 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11253 template <typename Dispatch> 11254 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11255 Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 11256 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11257 Dispatch const & d ) const 11258 { 11259 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11260 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11261 VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11262 # endif 11263 11264 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 11265 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11266 d.vkCreateCuFunctionNVX( m_device, 11267 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 11268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11269 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 11270 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); 11271 11272 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11273 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11274 } 11275 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11277 11278 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11279 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11280 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11282 { 11283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11284 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11285 } 11286 11287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11288 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11289 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11290 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11291 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11292 { 11293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11294 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11295 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 11296 # endif 11297 11298 d.vkDestroyCuModuleNVX( m_device, 11299 static_cast<VkCuModuleNVX>( module ), 11300 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11301 } 11302 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11303 11304 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11305 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11306 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11308 { 11309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11310 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11311 } 11312 11313 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11314 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11315 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 11316 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11317 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11318 { 11319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11320 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11321 VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); 11322 # endif 11323 11324 d.vkDestroyCuModuleNVX( m_device, 11325 static_cast<VkCuModuleNVX>( module ), 11326 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11327 } 11328 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11329 11330 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11331 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( 
VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11332 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11334 { 11335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11336 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11337 } 11338 11339 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11340 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11341 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11342 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11343 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11344 { 11345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11346 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11347 VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11348 # endif 11349 11350 d.vkDestroyCuFunctionNVX( m_device, 11351 static_cast<VkCuFunctionNVX>( function ), 11352 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11353 } 11354 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11355 11356 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11357 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11358 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11360 { 11361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11362 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( 
function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11363 } 11364 11365 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11366 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11367 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 11368 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11369 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11370 { 11371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11372 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11373 VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); 11374 # endif 11375 11376 d.vkDestroyCuFunctionNVX( m_device, 11377 static_cast<VkCuFunctionNVX>( function ), 11378 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11379 } 11380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11381 11382 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,Dispatch const & d) const11383 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, 11384 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11385 { 11386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11387 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); 11388 } 11389 11390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11391 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,Dispatch const & d) const11392 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, 11393 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11394 { 11395 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11396 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11397 VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" ); 11398 # endif 11399 11400 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); 11401 } 11402 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11403 11404 //=== VK_NVX_image_view_handle === 11405 11406 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,Dispatch const & d) const11407 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, 11408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11409 { 11410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11411 return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); 11412 } 11413 11414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11415 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,Dispatch const & d) const11416 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, 11417 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11418 { 11419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11420 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11421 VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" ); 11422 # endif 11423 11424 uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); 11425 11426 return result; 11427 } 11428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11429 11430 template <typename Dispatch> getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView 
imageView,VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,Dispatch const & d) const11431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, 11432 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, 11433 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11434 { 11435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11436 return static_cast<Result>( 11437 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); 11438 } 11439 11440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11441 template <typename Dispatch> 11442 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,Dispatch const & d) const11443 Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const 11444 { 11445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11447 VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" ); 11448 # endif 11449 11450 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; 11451 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11452 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) ); 11453 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); 11454 11455 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 11456 } 11457 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11458 11459 //=== VK_AMD_draw_indirect_count === 11460 11461 
template <typename Dispatch> drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const11462 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 11463 VULKAN_HPP_NAMESPACE::DeviceSize offset, 11464 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 11465 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 11466 uint32_t maxDrawCount, 11467 uint32_t stride, 11468 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11469 { 11470 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11471 d.vkCmdDrawIndirectCountAMD( m_commandBuffer, 11472 static_cast<VkBuffer>( buffer ), 11473 static_cast<VkDeviceSize>( offset ), 11474 static_cast<VkBuffer>( countBuffer ), 11475 static_cast<VkDeviceSize>( countBufferOffset ), 11476 maxDrawCount, 11477 stride ); 11478 } 11479 11480 template <typename Dispatch> drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const11481 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 11482 VULKAN_HPP_NAMESPACE::DeviceSize offset, 11483 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 11484 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 11485 uint32_t maxDrawCount, 11486 uint32_t stride, 11487 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11488 { 11489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11490 d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, 11491 static_cast<VkBuffer>( buffer ), 11492 static_cast<VkDeviceSize>( offset ), 11493 static_cast<VkBuffer>( countBuffer ), 11494 static_cast<VkDeviceSize>( countBufferOffset ), 11495 
maxDrawCount, 11496 stride ); 11497 } 11498 11499 //=== VK_AMD_shader_info === 11500 11501 template <typename Dispatch> getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,size_t * pInfoSize,void * pInfo,Dispatch const & d) const11502 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 11503 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 11504 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 11505 size_t * pInfoSize, 11506 void * pInfo, 11507 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11508 { 11509 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11510 return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, 11511 static_cast<VkPipeline>( pipeline ), 11512 static_cast<VkShaderStageFlagBits>( shaderStage ), 11513 static_cast<VkShaderInfoTypeAMD>( infoType ), 11514 pInfoSize, 11515 pInfo ) ); 11516 } 11517 11518 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11519 template <typename Uint8_tAllocator, typename Dispatch> 11520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Dispatch const & d) const11521 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 11522 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 11523 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 11524 Dispatch const & d ) const 11525 { 11526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11527 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11528 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 11529 # endif 11530 11531 std::vector<uint8_t, Uint8_tAllocator> info; 11532 size_t infoSize; 11533 
VULKAN_HPP_NAMESPACE::Result result; 11534 do 11535 { 11536 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 11537 static_cast<VkPipeline>( pipeline ), 11538 static_cast<VkShaderStageFlagBits>( shaderStage ), 11539 static_cast<VkShaderInfoTypeAMD>( infoType ), 11540 &infoSize, 11541 nullptr ) ); 11542 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 11543 { 11544 info.resize( infoSize ); 11545 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 11546 static_cast<VkPipeline>( pipeline ), 11547 static_cast<VkShaderStageFlagBits>( shaderStage ), 11548 static_cast<VkShaderInfoTypeAMD>( infoType ), 11549 &infoSize, 11550 reinterpret_cast<void *>( info.data() ) ) ); 11551 } 11552 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11553 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 11554 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 11555 if ( infoSize < info.size() ) 11556 { 11557 info.resize( infoSize ); 11558 } 11559 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 11560 } 11561 11562 template <typename Uint8_tAllocator, 11563 typename Dispatch, 11564 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 11565 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const11566 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 11567 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 11568 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 11569 Uint8_tAllocator & uint8_tAllocator, 11570 Dispatch const & d ) const 11571 { 11572 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11573 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11574 VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); 11575 # endif 11576 11577 std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); 11578 size_t infoSize; 11579 VULKAN_HPP_NAMESPACE::Result result; 11580 do 11581 { 11582 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 11583 static_cast<VkPipeline>( pipeline ), 11584 static_cast<VkShaderStageFlagBits>( shaderStage ), 11585 static_cast<VkShaderInfoTypeAMD>( infoType ), 11586 &infoSize, 11587 nullptr ) ); 11588 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 11589 { 11590 info.resize( infoSize ); 11591 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 11592 static_cast<VkPipeline>( pipeline ), 11593 static_cast<VkShaderStageFlagBits>( shaderStage ), 11594 static_cast<VkShaderInfoTypeAMD>( infoType ), 11595 &infoSize, 11596 reinterpret_cast<void *>( info.data() ) ) ); 11597 } 11598 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11599 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 11600 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 11601 if ( infoSize < info.size() ) 11602 { 11603 info.resize( infoSize ); 11604 } 11605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); 11606 } 11607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11608 11609 //=== VK_KHR_dynamic_rendering === 11610 11611 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const11612 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 11613 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11614 { 11615 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11616 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 11617 } 11618 11619 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11620 template <typename Dispatch> beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const11621 VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 11622 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11623 { 11624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11626 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); 11627 # endif 11628 11629 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 11630 } 11631 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11632 11633 template <typename Dispatch> endRenderingKHR(Dispatch const & d) const11634 VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11635 { 11636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11637 d.vkCmdEndRenderingKHR( m_commandBuffer ); 11638 } 11639 11640 #if defined( VK_USE_PLATFORM_GGP ) 11641 //=== VK_GGP_stream_descriptor_surface === 11642 11643 template <typename Dispatch> 11644 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const11645 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, 11646 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11647 
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 11648 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11649 { 11650 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11651 return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, 11652 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), 11653 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11654 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 11655 } 11656 11657 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11658 template <typename Dispatch> 11659 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11660 Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 11661 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11662 Dispatch const & d ) const 11663 { 11664 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11665 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11666 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 11667 # endif 11668 11669 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 11670 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 11671 m_instance, 11672 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 11673 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11674 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 11675 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createStreamDescriptorSurfaceGGP" ); 11676 11677 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 11678 } 11679 11680 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11681 template <typename Dispatch> 11682 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique(const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11683 Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, 11684 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11685 Dispatch const & d ) const 11686 { 11687 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11688 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11689 VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); 11690 # endif 11691 11692 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 11693 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP( 11694 m_instance, 11695 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), 11696 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11697 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 11698 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); 11699 11700 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 11701 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 11702 } 11703 # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ 11704 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11705 #endif /*VK_USE_PLATFORM_GGP*/ 11706 11707 //=== VK_NV_external_memory_capabilities === 11708 11709 template <typename Dispatch> 11710 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,Dispatch const & d) const11711 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 11712 VULKAN_HPP_NAMESPACE::ImageType type, 11713 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 11714 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 11715 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 11716 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 11717 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, 11718 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11719 { 11720 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11721 return static_cast<Result>( 11722 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, 11723 static_cast<VkFormat>( format ), 11724 static_cast<VkImageType>( type ), 11725 static_cast<VkImageTiling>( tiling ), 11726 static_cast<VkImageUsageFlags>( usage ), 11727 static_cast<VkImageCreateFlags>( flags ), 11728 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 11729 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) ); 11730 } 11731 11732 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11733 template <typename Dispatch> 11734 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,Dispatch const & d) const11735 PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, 11736 VULKAN_HPP_NAMESPACE::ImageType type, 11737 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 11738 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 11739 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 11740 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, 11741 Dispatch const & d ) const 11742 { 11743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11744 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11745 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && 11746 "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" ); 11747 # endif 11748 11749 VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; 11750 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11751 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, 11752 static_cast<VkFormat>( format ), 11753 static_cast<VkImageType>( type ), 11754 static_cast<VkImageTiling>( tiling ), 11755 static_cast<VkImageUsageFlags>( usage ), 11756 static_cast<VkImageCreateFlags>( flags ), 11757 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), 11758 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) ); 11759 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); 11760 11761 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); 11762 } 11763 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11764 11765 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 11766 //=== VK_NV_external_memory_win32 === 11767 11768 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,HANDLE * pHandle,Dispatch const & d) const11769 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 11770 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, 11771 HANDLE * pHandle, 11772 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11773 { 11774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11775 return static_cast<Result>( 11776 d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); 11777 } 11778 11779 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11780 template <typename Dispatch> getMemoryWin32HandleNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,Dispatch const & d) const11781 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( 11782 VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const 11783 { 11784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11785 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11786 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" ); 11787 # endif 11788 11789 HANDLE handle; 11790 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11791 d.vkGetMemoryWin32HandleNV( m_device, 
static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) ); 11792 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); 11793 11794 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 11795 } 11796 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11797 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 11798 11799 //=== VK_KHR_get_physical_device_properties2 === 11800 11801 template <typename Dispatch> getFeatures2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const11802 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, 11803 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11804 { 11805 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11806 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 11807 } 11808 11809 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11810 template <typename Dispatch> 11811 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const & d) const11812 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11813 { 11814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11815 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11816 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 11817 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11818 # endif 11819 11820 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 11821 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 11822 11823 return features; 11824 } 11825 11826 template <typename X, typename Y, typename... 
Z, typename Dispatch> 11827 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const & d) const11828 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11829 { 11830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11832 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && 11833 "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11834 # endif 11835 11836 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 11837 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 11838 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 11839 11840 return structureChain; 11841 } 11842 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11843 11844 template <typename Dispatch> getProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const11845 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 11846 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11847 { 11848 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11849 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 11850 } 11851 11852 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11853 template <typename Dispatch> 11854 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const & d) const11855 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11856 { 11857 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11858 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 
) 11859 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 11860 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11861 # endif 11862 11863 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 11864 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 11865 11866 return properties; 11867 } 11868 11869 template <typename X, typename Y, typename... Z, typename Dispatch> 11870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const & d) const11871 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11872 { 11873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11874 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11875 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && 11876 "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11877 # endif 11878 11879 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 11880 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 11881 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 11882 11883 return structureChain; 11884 } 11885 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11886 11887 template <typename Dispatch> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const11888 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 11889 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 11890 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11891 { 11892 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11893 d.vkGetPhysicalDeviceFormatProperties2KHR( 11894 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 11895 } 11896 11897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11898 template <typename Dispatch> 11899 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const11900 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11901 { 11902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11903 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11904 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 11905 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11906 # endif 11907 11908 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 11909 d.vkGetPhysicalDeviceFormatProperties2KHR( 11910 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 11911 11912 return formatProperties; 11913 } 11914 11915 template <typename X, typename Y, typename... 
Z, typename Dispatch> 11916 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const11917 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11918 { 11919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11920 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11921 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && 11922 "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11923 # endif 11924 11925 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 11926 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 11927 d.vkGetPhysicalDeviceFormatProperties2KHR( 11928 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 11929 11930 return structureChain; 11931 } 11932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11933 11934 template <typename Dispatch> 11935 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const11936 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 11937 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 11938 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11939 { 11940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11941 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 11942 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 11943 
reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 11944 } 11945 11946 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11947 template <typename Dispatch> 11948 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const11949 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 11950 { 11951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11952 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11953 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 11954 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11955 # endif 11956 11957 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 11958 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11959 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 11960 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 11961 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 11962 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 11963 11964 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 11965 } 11966 11967 template <typename X, typename Y, typename... 
Z, typename Dispatch> 11968 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const11969 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 11970 { 11971 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11972 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 11973 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && 11974 "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 11975 # endif 11976 11977 StructureChain<X, Y, Z...> structureChain; 11978 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 11979 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11980 d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, 11981 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 11982 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 11983 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 11984 11985 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 11986 } 11987 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11988 11989 template <typename Dispatch> getQueueFamilyProperties2KHR(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const11990 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, 11991 
VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 11992 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11993 { 11994 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11995 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 11996 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 11997 } 11998 11999 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12000 template <typename QueueFamilyProperties2Allocator, typename Dispatch> 12001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12002 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12003 { 12004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12006 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12007 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12008 # endif 12009 12010 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 12011 uint32_t queueFamilyPropertyCount; 12012 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12013 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12014 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12015 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12016 12017 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12018 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12019 { 12020 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12021 } 12022 return queueFamilyProperties; 12023 } 12024 12025 template < 12026 typename 
QueueFamilyProperties2Allocator, 12027 typename Dispatch, 12028 typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 12029 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const12030 PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 12031 { 12032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12033 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12034 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12035 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12036 # endif 12037 12038 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 12039 uint32_t queueFamilyPropertyCount; 12040 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12041 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12042 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12043 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12044 12045 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12046 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12047 { 12048 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12049 } 12050 return queueFamilyProperties; 12051 } 12052 12053 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 12054 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(Dispatch const & d) const12055 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 12056 { 12057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12059 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12060 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12061 # endif 12062 12063 std::vector<StructureChain, StructureChainAllocator> structureChains; 12064 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 12065 uint32_t queueFamilyPropertyCount; 12066 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12067 structureChains.resize( queueFamilyPropertyCount ); 12068 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12069 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12070 { 12071 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 12072 } 12073 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12074 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12075 12076 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12077 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12078 { 12079 structureChains.resize( queueFamilyPropertyCount ); 12080 } 12081 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12082 { 12083 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 12084 } 12085 return structureChains; 12086 } 12087 12088 template <typename StructureChain, 12089 typename StructureChainAllocator, 12090 typename Dispatch, 12091 typename 
std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type> 12092 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR(StructureChainAllocator & structureChainAllocator,Dispatch const & d) const12093 PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 12094 { 12095 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12096 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12097 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && 12098 "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12099 # endif 12100 12101 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 12102 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 12103 uint32_t queueFamilyPropertyCount; 12104 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 12105 structureChains.resize( queueFamilyPropertyCount ); 12106 queueFamilyProperties.resize( queueFamilyPropertyCount ); 12107 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12108 { 12109 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 12110 } 12111 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 12112 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 12113 12114 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 12115 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 12116 { 12117 structureChains.resize( queueFamilyPropertyCount ); 12118 } 12119 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 12120 { 12121 
structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 12122 } 12123 return structureChains; 12124 } 12125 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12126 12127 template <typename Dispatch> getMemoryProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const12128 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 12129 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12130 { 12131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12132 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 12133 } 12134 12135 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12136 template <typename Dispatch> 12137 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const & d) const12138 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12139 { 12140 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12141 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12142 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 12143 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12144 # endif 12145 12146 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 12147 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 12148 12149 return memoryProperties; 12150 } 12151 12152 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12153 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const & d) const12154 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12155 { 12156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12157 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12158 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && 12159 "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12160 # endif 12161 12162 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 12163 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 12164 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 12165 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 12166 12167 return structureChain; 12168 } 12169 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12170 12171 template <typename Dispatch> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const12172 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 12173 uint32_t * pPropertyCount, 12174 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 12175 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12176 { 12177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12178 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12179 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 12180 pPropertyCount, 12181 
reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 12182 } 12183 12184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12185 template <typename SparseImageFormatProperties2Allocator, typename Dispatch> 12186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const12187 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 12188 Dispatch const & d ) const 12189 { 12190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12191 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12192 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 12193 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12194 # endif 12195 12196 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 12197 uint32_t propertyCount; 12198 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 12199 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 12200 properties.resize( propertyCount ); 12201 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12202 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 12203 &propertyCount, 12204 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 12205 12206 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 12207 if ( propertyCount < properties.size() ) 12208 { 12209 properties.resize( propertyCount ); 12210 } 12211 return properties; 12212 } 12213 12214 template < 12215 typename SparseImageFormatProperties2Allocator, 12216 typename 
Dispatch, 12217 typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, 12218 int>::type> 12219 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const12220 PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 12221 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 12222 Dispatch const & d ) const 12223 { 12224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12225 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12226 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && 12227 "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); 12228 # endif 12229 12230 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 12231 uint32_t propertyCount; 12232 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 12233 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 12234 properties.resize( propertyCount ); 12235 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, 12236 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 12237 &propertyCount, 12238 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 12239 12240 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 12241 if ( propertyCount < properties.size() ) 
12242 { 12243 properties.resize( propertyCount ); 12244 } 12245 return properties; 12246 } 12247 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12248 12249 //=== VK_KHR_device_group === 12250 12251 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const12252 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, 12253 uint32_t localDeviceIndex, 12254 uint32_t remoteDeviceIndex, 12255 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 12256 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12257 { 12258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12259 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 12260 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 12261 } 12262 12263 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12264 template <typename Dispatch> getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const12265 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( 12266 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12267 { 12268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12270 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && 12271 "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); 12272 # endif 12273 12274 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 12275 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( 12276 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, 
reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); 12277 12278 return peerMemoryFeatures; 12279 } 12280 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12281 12282 template <typename Dispatch> setDeviceMaskKHR(uint32_t deviceMask,Dispatch const & d) const12283 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12284 { 12285 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12286 d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); 12287 } 12288 12289 template <typename Dispatch> dispatchBaseKHR(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const12290 VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, 12291 uint32_t baseGroupY, 12292 uint32_t baseGroupZ, 12293 uint32_t groupCountX, 12294 uint32_t groupCountY, 12295 uint32_t groupCountZ, 12296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12297 { 12298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12299 d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 12300 } 12301 12302 #if defined( VK_USE_PLATFORM_VI_NN ) 12303 //=== VK_NN_vi_surface === 12304 12305 template <typename Dispatch> createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const12306 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, 12307 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12308 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 12309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12310 { 12311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12312 return 
static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, 12313 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), 12314 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12315 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 12316 } 12317 12318 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12319 template <typename Dispatch> 12320 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12321 Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 12322 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12323 Dispatch const & d ) const 12324 { 12325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12326 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12327 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 12328 # endif 12329 12330 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12331 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12332 d.vkCreateViSurfaceNN( m_instance, 12333 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 12334 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12335 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12336 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); 12337 12338 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 12339 } 12340 12341 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12342 template <typename Dispatch> 12343 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type 
createViSurfaceNNUnique(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12344 Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 12345 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12346 Dispatch const & d ) const 12347 { 12348 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12349 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12350 VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); 12351 # endif 12352 12353 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 12354 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12355 d.vkCreateViSurfaceNN( m_instance, 12356 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 12357 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12358 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 12359 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); 12360 12361 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 12362 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 12363 } 12364 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12365 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12366 #endif /*VK_USE_PLATFORM_VI_NN*/ 12367 12368 //=== VK_KHR_maintenance1 === 12369 12370 template <typename Dispatch> trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const12371 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 12372 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 12373 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 12374 { 12375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12376 d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 12377 } 12378 12379 //=== VK_KHR_device_group_creation === 12380 12381 template <typename Dispatch> 12382 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroupsKHR(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const12383 Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, 12384 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 12385 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12386 { 12387 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12388 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12389 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 12390 } 12391 12392 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12393 template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 12394 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12395 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const & d) const12396 Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const 12397 { 12398 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12399 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12400 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 12401 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 12402 # endif 12403 12404 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, 
PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 12405 uint32_t physicalDeviceGroupCount; 12406 VULKAN_HPP_NAMESPACE::Result result; 12407 do 12408 { 12409 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 12410 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 12411 { 12412 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12413 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12414 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 12415 } 12416 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12417 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 12418 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 12419 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 12420 { 12421 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12422 } 12423 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 12424 } 12425 12426 template <typename PhysicalDeviceGroupPropertiesAllocator, 12427 typename Dispatch, 12428 typename std::enable_if< 12429 std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, 12430 int>::type> 12431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12432 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const12433 Instance::enumeratePhysicalDeviceGroupsKHR( 
PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 12434 { 12435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12436 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12437 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && 12438 "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); 12439 # endif 12440 12441 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 12442 physicalDeviceGroupPropertiesAllocator ); 12443 uint32_t physicalDeviceGroupCount; 12444 VULKAN_HPP_NAMESPACE::Result result; 12445 do 12446 { 12447 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 12448 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 12449 { 12450 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12451 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 12452 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 12453 } 12454 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12455 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 12456 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 12457 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 12458 { 12459 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 12460 } 12461 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); 12462 } 12463 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12464 12465 //=== 
VK_KHR_external_memory_capabilities === 12466 12467 template <typename Dispatch> getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const12468 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 12469 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 12470 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12471 { 12472 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12473 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 12474 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 12475 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 12476 } 12477 12478 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12479 template <typename Dispatch> 12480 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const12481 PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 12482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12483 { 12484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12485 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12486 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && 12487 "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); 12488 # endif 12489 12490 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 12491 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 12492 reinterpret_cast<const 
VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 12493 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 12494 12495 return externalBufferProperties; 12496 } 12497 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12498 12499 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 12500 //=== VK_KHR_external_memory_win32 === 12501 12502 template <typename Dispatch> getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const12503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, 12504 HANDLE * pHandle, 12505 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12506 { 12507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12508 return static_cast<Result>( 12509 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 12510 } 12511 12512 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12513 template <typename Dispatch> 12514 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const12515 Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 12516 { 12517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12518 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12519 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" ); 12520 # endif 12521 12522 HANDLE handle; 12523 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12524 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( 
&getWin32HandleInfo ), &handle ) ); 12525 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); 12526 12527 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 12528 } 12529 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12530 12531 template <typename Dispatch> 12532 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,Dispatch const & d) const12533 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 12534 HANDLE handle, 12535 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, 12536 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12537 { 12538 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12539 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 12540 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 12541 handle, 12542 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); 12543 } 12544 12545 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12546 template <typename Dispatch> 12547 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,Dispatch const & d) const12548 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const 12549 { 12550 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12551 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12552 VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function 
<vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" ); 12553 # endif 12554 12555 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; 12556 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12557 d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 12558 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 12559 handle, 12560 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) ); 12561 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); 12562 12563 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); 12564 } 12565 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12566 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 12567 12568 //=== VK_KHR_external_memory_fd === 12569 12570 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const12571 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, 12572 int * pFd, 12573 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12574 { 12575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12576 return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 12577 } 12578 12579 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12580 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,Dispatch const & d) const12581 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, 12582 Dispatch const & d ) const 12583 { 12584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12585 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12586 VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" ); 12587 # endif 12588 12589 int fd; 12590 VULKAN_HPP_NAMESPACE::Result result = 12591 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 12592 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); 12593 12594 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 12595 } 12596 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12597 12598 template <typename Dispatch> getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,Dispatch const & d) const12599 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 12600 int fd, 12601 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, 12602 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12603 { 12604 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12605 return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( 12606 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); 12607 } 12608 12609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12610 template <typename Dispatch> 12611 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,Dispatch const & d) const12612 Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const 12613 { 12614 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12615 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12616 VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" ); 12617 # endif 12618 12619 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; 12620 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR( 12621 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); 12622 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); 12623 12624 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); 12625 } 12626 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12627 12628 //=== VK_KHR_external_semaphore_capabilities === 12629 12630 template <typename Dispatch> 12631 VULKAN_HPP_INLINE void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const12632 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 12633 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 12634 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12635 { 12636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12637 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 12638 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 12639 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 12640 } 12641 12642 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12643 template <typename Dispatch> 12644 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const12645 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 12646 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12647 { 12648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12650 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && 12651 "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); 12652 # endif 12653 12654 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 12655 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 12656 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 12657 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 12658 12659 return externalSemaphoreProperties; 12660 } 12661 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12662 12663 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 12664 //=== VK_KHR_external_semaphore_win32 === 12665 12666 template <typename Dispatch> importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,Dispatch const & d) const12667 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( 12668 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12669 { 12670 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12671 return static_cast<Result>( 12672 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const 
VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); 12673 } 12674 12675 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12676 template <typename Dispatch> 12677 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,Dispatch const & d) const12678 Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 12679 Dispatch const & d ) const 12680 { 12681 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12682 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12683 VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 12684 # endif 12685 12686 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12687 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) ); 12688 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); 12689 12690 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 12691 } 12692 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12693 12694 template <typename Dispatch> getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const12695 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( 12696 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12697 { 12698 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12699 return static_cast<Result>( 12700 
d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 12701 } 12702 12703 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12704 template <typename Dispatch> 12705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const12706 Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 12707 { 12708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12710 VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); 12711 # endif 12712 12713 HANDLE handle; 12714 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12715 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 12716 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); 12717 12718 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 12719 } 12720 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12721 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 12722 12723 //=== VK_KHR_external_semaphore_fd === 12724 12725 template <typename Dispatch> importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,Dispatch const & d) const12726 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, 12727 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12728 { 12729 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
12730 return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); 12731 } 12732 12733 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12734 template <typename Dispatch> 12735 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,Dispatch const & d) const12736 Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const 12737 { 12738 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12739 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12740 VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 12741 # endif 12742 12743 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12744 d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); 12745 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); 12746 12747 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 12748 } 12749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12750 12751 template <typename Dispatch> getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const12752 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, 12753 int * pFd, 12754 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12755 { 12756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12757 return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 12758 } 12759 12760 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 12761 template <typename Dispatch> 12762 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,Dispatch const & d) const12763 Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const 12764 { 12765 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12767 VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 12768 # endif 12769 12770 int fd; 12771 VULKAN_HPP_NAMESPACE::Result result = 12772 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 12773 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); 12774 12775 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 12776 } 12777 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12778 12779 //=== VK_KHR_push_descriptor === 12780 12781 template <typename Dispatch> pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,Dispatch const & d) const12782 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 12783 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 12784 uint32_t set, 12785 uint32_t descriptorWriteCount, 12786 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 12787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12788 { 12789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12790 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 12791 
static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 12792 static_cast<VkPipelineLayout>( layout ), 12793 set, 12794 descriptorWriteCount, 12795 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); 12796 } 12797 12798 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12799 template <typename Dispatch> 12800 VULKAN_HPP_INLINE void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,Dispatch const & d) const12801 CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 12802 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 12803 uint32_t set, 12804 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 12805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12806 { 12807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12808 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12809 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor>" ); 12810 # endif 12811 12812 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 12813 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 12814 static_cast<VkPipelineLayout>( layout ), 12815 set, 12816 descriptorWrites.size(), 12817 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); 12818 } 12819 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12820 12821 template <typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,const void * pData,Dispatch const & d) const12822 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12823 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, 12824 uint32_t set, 12825 const void * pData, 12826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12827 { 12828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12829 d.vkCmdPushDescriptorSetWithTemplateKHR( 12830 m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData ); 12831 } 12832 12833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12834 template <typename DataType, typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,DataType const & data,Dispatch const & d) const12835 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12836 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 12837 uint32_t set, 12838 DataType const & data, 12839 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12840 { 12841 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12842 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12843 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplateKHR && 12844 "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor>" ); 12845 # endif 12846 12847 d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, 12848 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 12849 static_cast<VkPipelineLayout>( layout ), 12850 set, 12851 reinterpret_cast<const void *>( &data ) ); 12852 } 12853 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12854 12855 //=== VK_EXT_conditional_rendering === 12856 12857 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,Dispatch const & d) const12858 VULKAN_HPP_INLINE void 
CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, 12859 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12860 { 12861 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12862 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); 12863 } 12864 12865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12866 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,Dispatch const & d) const12867 VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, 12868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12869 { 12870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12871 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12872 VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" ); 12873 # endif 12874 12875 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); 12876 } 12877 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12878 12879 template <typename Dispatch> endConditionalRenderingEXT(Dispatch const & d) const12880 VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12881 { 12882 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12883 d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); 12884 } 12885 12886 //=== VK_KHR_descriptor_update_template === 12887 12888 template <typename Dispatch> 12889 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplateKHR(const 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const12890 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 12891 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12892 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 12893 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12894 { 12895 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12896 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, 12897 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 12898 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12899 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 12900 } 12901 12902 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12903 template <typename Dispatch> 12904 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12905 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 12906 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12907 Dispatch const & d ) const 12908 { 12909 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12910 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12911 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 12912 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 12913 # endif 12914 12915 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 12916 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 12917 m_device, 12918 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 12919 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12920 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 12921 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); 12922 12923 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); 12924 } 12925 12926 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12927 template <typename Dispatch> 12928 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12929 Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 12930 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12931 Dispatch const & d ) const 12932 { 12933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12934 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12935 VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && 12936 "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 12937 # endif 12938 12939 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 12940 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateDescriptorUpdateTemplateKHR( 12941 m_device, 12942 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 12943 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12944 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 12945 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); 12946 12947 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 12948 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 12949 descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12950 } 12951 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12953 12954 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12955 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12956 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12957 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12958 { 12959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12960 d.vkDestroyDescriptorUpdateTemplateKHR( 12961 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12962 } 12963 12964 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12965 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12966 VULKAN_HPP_INLINE void 
Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12967 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12969 { 12970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12971 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 12972 VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && 12973 "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 12974 # endif 12975 12976 d.vkDestroyDescriptorUpdateTemplateKHR( 12977 m_device, 12978 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 12979 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12980 } 12981 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12982 12983 template <typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const12984 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 12985 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12986 const void * pData, 12987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12988 { 12989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12990 d.vkUpdateDescriptorSetWithTemplateKHR( 12991 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 12992 } 12993 12994 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12995 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType 
const & data,Dispatch const & d) const12996 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 12997 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 12998 DataType const & data, 12999 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13000 { 13001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13003 VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && 13004 "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); 13005 # endif 13006 13007 d.vkUpdateDescriptorSetWithTemplateKHR( m_device, 13008 static_cast<VkDescriptorSet>( descriptorSet ), 13009 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 13010 reinterpret_cast<const void *>( &data ) ); 13011 } 13012 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13013 13014 //=== VK_NV_clip_space_w_scaling === 13015 13016 template <typename Dispatch> setViewportWScalingNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,Dispatch const & d) const13017 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13018 uint32_t viewportCount, 13019 const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, 13020 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13021 { 13022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13023 d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); 13024 } 13025 13026 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13027 template <typename Dispatch> 13028 VULKAN_HPP_INLINE void setViewportWScalingNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,Dispatch const & d) 
const13029 CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 13030 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, 13031 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13032 { 13033 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13034 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13035 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" ); 13036 # endif 13037 13038 d.vkCmdSetViewportWScalingNV( 13039 m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); 13040 } 13041 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13042 13043 //=== VK_EXT_direct_mode_display === 13044 13045 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 13046 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13047 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13048 { 13049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13050 return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 13051 } 13052 #else 13053 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13054 VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13055 { 13056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13057 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13058 VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function <vkReleaseDisplayEXT> requires <VK_EXT_direct_mode_display>" ); 13059 # endif 13060 13061 d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) 
); 13062 } 13063 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 13064 13065 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) 13066 //=== VK_EXT_acquire_xlib_display === 13067 13068 template <typename Dispatch> acquireXlibDisplayEXT(Display * dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13069 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, 13070 VULKAN_HPP_NAMESPACE::DisplayKHR display, 13071 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13072 { 13073 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13074 return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) ); 13075 } 13076 13077 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13078 template <typename Dispatch> 13079 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireXlibDisplayEXT(Display & dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13080 PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 13081 { 13082 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13083 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13084 VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13085 # endif 13086 13087 VULKAN_HPP_NAMESPACE::Result result = 13088 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); 13089 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); 13090 13091 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13092 } 13093 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13094 13095 template <typename Dispatch> getRandROutputDisplayEXT(Display * dpy,RROutput 
rrOutput,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const13096 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, 13097 RROutput rrOutput, 13098 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 13099 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13100 { 13101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13102 return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 13103 } 13104 13105 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13106 template <typename Dispatch> 13107 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(Display & dpy,RROutput rrOutput,Dispatch const & d) const13108 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 13109 { 13110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13111 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13112 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13113 # endif 13114 13115 VULKAN_HPP_NAMESPACE::DisplayKHR display; 13116 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13117 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 13118 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); 13119 13120 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 13121 } 13122 13123 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13124 template <typename Dispatch> 13125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique(Display & 
dpy,RROutput rrOutput,Dispatch const & d) const13126 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 13127 { 13128 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13129 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13130 VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); 13131 # endif 13132 13133 VULKAN_HPP_NAMESPACE::DisplayKHR display; 13134 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13135 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 13136 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); 13137 13138 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13139 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 13140 } 13141 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13142 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13143 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ 13144 13145 //=== VK_EXT_display_surface_counter === 13146 13147 template <typename Dispatch> 13148 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,Dispatch const & d) const13149 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 13150 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, 13151 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13152 { 13153 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13154 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 13155 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), 
reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); 13156 } 13157 13158 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13159 template <typename Dispatch> 13160 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const13161 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 13162 { 13163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13164 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13165 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && 13166 "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" ); 13167 # endif 13168 13169 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; 13170 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 13171 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); 13172 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); 13173 13174 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); 13175 } 13176 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13177 13178 //=== VK_EXT_display_control === 13179 13180 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,Dispatch const & d) const13181 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13182 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, 13183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13184 { 
13185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13186 return static_cast<Result>( 13187 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); 13188 } 13189 13190 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13191 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,Dispatch const & d) const13192 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13193 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, 13194 Dispatch const & d ) const 13195 { 13196 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13197 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13198 VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" ); 13199 # endif 13200 13201 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13202 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); 13203 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); 13204 13205 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13206 } 13207 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13208 13209 template <typename Dispatch> registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const13210 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, 13211 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, 13212 VULKAN_HPP_NAMESPACE::Fence * pFence, 13213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13214 { 13215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13216 return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, 13217 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), 13218 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13219 reinterpret_cast<VkFence *>( pFence ) ) ); 13220 } 13221 13222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13223 template <typename Dispatch> 13224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13225 Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 13226 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13227 Dispatch const & d ) const 13228 { 13229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13230 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13231 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 13232 # endif 13233 13234 VULKAN_HPP_NAMESPACE::Fence fence; 13235 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 13236 m_device, 13237 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 13238 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13239 reinterpret_cast<VkFence *>( &fence ) ) ); 13240 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); 13241 13242 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 13243 } 13244 13245 # ifndef 
VULKAN_HPP_NO_SMART_HANDLE 13246 template <typename Dispatch> 13247 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13248 Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 13249 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13250 Dispatch const & d ) const 13251 { 13252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13254 VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); 13255 # endif 13256 13257 VULKAN_HPP_NAMESPACE::Fence fence; 13258 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 13259 m_device, 13260 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 13261 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13262 reinterpret_cast<VkFence *>( &fence ) ) ); 13263 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); 13264 13265 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13266 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13267 } 13268 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13269 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13270 13271 template <typename Dispatch> registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * 
pFence,Dispatch const & d) const13272 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13273 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, 13274 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13275 VULKAN_HPP_NAMESPACE::Fence * pFence, 13276 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13277 { 13278 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13279 return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, 13280 static_cast<VkDisplayKHR>( display ), 13281 reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), 13282 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13283 reinterpret_cast<VkFence *>( pFence ) ) ); 13284 } 13285 13286 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13287 template <typename Dispatch> 13288 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13289 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13290 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 13291 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13292 Dispatch const & d ) const 13293 { 13294 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13295 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13296 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 13297 # endif 13298 13299 VULKAN_HPP_NAMESPACE::Fence fence; 13300 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 13301 m_device, 13302 static_cast<VkDisplayKHR>( display ), 13303 
reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 13304 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13305 reinterpret_cast<VkFence *>( &fence ) ) ); 13306 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); 13307 13308 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); 13309 } 13310 13311 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13312 template <typename Dispatch> 13313 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13314 Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13315 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 13316 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13317 Dispatch const & d ) const 13318 { 13319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13320 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13321 VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); 13322 # endif 13323 13324 VULKAN_HPP_NAMESPACE::Fence fence; 13325 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 13326 m_device, 13327 static_cast<VkDisplayKHR>( display ), 13328 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 13329 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13330 reinterpret_cast<VkFence *>( &fence ) ) ); 13331 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); 13332 13333 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13334 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13335 } 13336 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13337 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13338 13339 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const13340 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13341 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 13342 uint64_t * pCounterValue, 13343 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13344 { 13345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13346 return static_cast<Result>( 13347 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); 13348 } 13349 13350 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13351 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const13352 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( 13353 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const 13354 { 13355 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13356 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13357 VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" ); 13358 # endif 13359 13360 
uint64_t counterValue; 13361 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13362 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) ); 13363 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); 13364 13365 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); 13366 } 13367 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13368 13369 //=== VK_GOOGLE_display_timing === 13370 13371 template <typename Dispatch> 13372 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,Dispatch const & d) const13373 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13374 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, 13375 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13376 { 13377 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13378 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( 13379 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); 13380 } 13381 13382 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13383 template <typename Dispatch> 13384 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13385 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 13386 { 13387 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13388 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13389 VULKAN_HPP_ASSERT( 
d.vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" ); 13390 # endif 13391 13392 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; 13393 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRefreshCycleDurationGOOGLE( 13394 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); 13395 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); 13396 13397 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); 13398 } 13399 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13400 13401 template <typename Dispatch> 13402 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pPresentationTimingCount,VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,Dispatch const & d) const13403 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13404 uint32_t * pPresentationTimingCount, 13405 VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, 13406 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13407 { 13408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13409 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, 13410 static_cast<VkSwapchainKHR>( swapchain ), 13411 pPresentationTimingCount, 13412 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); 13413 } 13414 13415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13416 template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch> 13417 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13418 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, 
PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13419 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 13420 { 13421 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13422 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13423 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 13424 # endif 13425 13426 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; 13427 uint32_t presentationTimingCount; 13428 VULKAN_HPP_NAMESPACE::Result result; 13429 do 13430 { 13431 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13432 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 13433 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 13434 { 13435 presentationTimings.resize( presentationTimingCount ); 13436 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13437 d.vkGetPastPresentationTimingGOOGLE( m_device, 13438 static_cast<VkSwapchainKHR>( swapchain ), 13439 &presentationTimingCount, 13440 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 13441 } 13442 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13443 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 13444 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 13445 if ( presentationTimingCount < presentationTimings.size() ) 13446 { 13447 presentationTimings.resize( presentationTimingCount ); 13448 } 13449 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 13450 } 13451 
13452 template < 13453 typename PastPresentationTimingGOOGLEAllocator, 13454 typename Dispatch, 13455 typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, 13456 int>::type> 13457 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13458 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,Dispatch const & d) const13459 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13460 PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, 13461 Dispatch const & d ) const 13462 { 13463 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13464 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13465 VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); 13466 # endif 13467 13468 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( 13469 pastPresentationTimingGOOGLEAllocator ); 13470 uint32_t presentationTimingCount; 13471 VULKAN_HPP_NAMESPACE::Result result; 13472 do 13473 { 13474 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13475 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 13476 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 13477 { 13478 presentationTimings.resize( presentationTimingCount ); 13479 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13480 d.vkGetPastPresentationTimingGOOGLE( m_device, 13481 static_cast<VkSwapchainKHR>( swapchain ), 13482 &presentationTimingCount, 13483 
reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 13484 } 13485 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13486 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 13487 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 13488 if ( presentationTimingCount < presentationTimings.size() ) 13489 { 13490 presentationTimings.resize( presentationTimingCount ); 13491 } 13492 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); 13493 } 13494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13495 13496 //=== VK_EXT_discard_rectangles === 13497 13498 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,Dispatch const & d) const13499 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 13500 uint32_t discardRectangleCount, 13501 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, 13502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13503 { 13504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13505 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); 13506 } 13507 13508 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13509 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,Dispatch const & d) const13510 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 13511 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, 13512 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13513 { 13514 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13515 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13516 VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" ); 13517 # endif 13518 13519 d.vkCmdSetDiscardRectangleEXT( 13520 m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); 13521 } 13522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13523 13524 template <typename Dispatch> setDiscardRectangleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,Dispatch const & d) const13525 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, 13526 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13527 { 13528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13529 d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) ); 13530 } 13531 13532 template <typename Dispatch> setDiscardRectangleModeEXT(VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,Dispatch const & d) const13533 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, 13534 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13535 { 13536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13537 d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) ); 13538 } 13539 13540 //=== VK_EXT_hdr_metadata === 13541 13542 template <typename Dispatch> setHdrMetadataEXT(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,Dispatch const & d) const13543 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, 13544 const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 13545 
const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, 13546 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13547 { 13548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13549 d.vkSetHdrMetadataEXT( 13550 m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); 13551 } 13552 13553 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13554 template <typename Dispatch> setHdrMetadataEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,Dispatch const & d) const13555 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, 13556 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, 13557 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 13558 { 13559 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13560 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13561 VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" ); 13562 # endif 13563 # ifdef VULKAN_HPP_NO_EXCEPTIONS 13564 VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); 13565 # else 13566 if ( swapchains.size() != metadata.size() ) 13567 { 13568 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); 13569 } 13570 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 13571 13572 d.vkSetHdrMetadataEXT( m_device, 13573 swapchains.size(), 13574 reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), 13575 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); 13576 } 13577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13578 13579 //=== VK_KHR_create_renderpass2 === 13580 13581 template <typename Dispatch> 
createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const13582 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 13583 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 13584 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 13585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13586 { 13587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13588 return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, 13589 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 13590 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 13591 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 13592 } 13593 13594 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13595 template <typename Dispatch> 13596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13597 Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 13598 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13599 Dispatch const & d ) const 13600 { 13601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13602 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13603 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 13604 # endif 13605 13606 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 13607 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13608 d.vkCreateRenderPass2KHR( m_device, 13609 reinterpret_cast<const 
VkRenderPassCreateInfo2 *>( &createInfo ), 13610 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13611 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 13612 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); 13613 13614 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); 13615 } 13616 13617 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13618 template <typename Dispatch> 13619 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13620 Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 13621 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13622 Dispatch const & d ) const 13623 { 13624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13626 VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 13627 # endif 13628 13629 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 13630 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13631 d.vkCreateRenderPass2KHR( m_device, 13632 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 13633 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13634 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 13635 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); 13636 13637 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( 13638 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13639 } 13640 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13641 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13642 13643 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const13644 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 13645 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 13646 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13647 { 13648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13649 d.vkCmdBeginRenderPass2KHR( 13650 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 13651 } 13652 13653 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13654 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const13655 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 13656 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 13657 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13658 { 13659 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13660 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13661 VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 13662 # endif 13663 13664 d.vkCmdBeginRenderPass2KHR( 13665 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( 
&renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 13666 } 13667 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13668 13669 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const13670 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 13671 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 13672 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13673 { 13674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13675 d.vkCmdNextSubpass2KHR( 13676 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 13677 } 13678 13679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13680 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const13681 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 13682 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 13683 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13684 { 13685 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13686 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13687 VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 13688 # endif 13689 13690 d.vkCmdNextSubpass2KHR( 13691 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 13692 } 13693 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13694 13695 template <typename Dispatch> endRenderPass2KHR(const 
VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const13696 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 13697 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13698 { 13699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13700 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 13701 } 13702 13703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13704 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const13705 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 13706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13707 { 13708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13710 VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); 13711 # endif 13712 13713 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 13714 } 13715 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13716 13717 //=== VK_KHR_shared_presentable_image === 13718 13719 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 13720 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13721 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13722 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13723 { 13724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13725 return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 13726 } 13727 #else 13728 template <typename Dispatch> 
getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const13729 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 13730 Dispatch const & d ) const 13731 { 13732 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13734 VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" ); 13735 # endif 13736 13737 VULKAN_HPP_NAMESPACE::Result result = 13738 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 13739 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 13740 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", 13741 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 13742 13743 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 13744 } 13745 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 13746 13747 //=== VK_KHR_external_fence_capabilities === 13748 13749 template <typename Dispatch> getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const13750 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 13751 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 13752 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13753 { 13754 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13755 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 13756 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 13757 reinterpret_cast<VkExternalFenceProperties *>( 
pExternalFenceProperties ) ); 13758 } 13759 13760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13761 template <typename Dispatch> 13762 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const13763 PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 13764 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13765 { 13766 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13767 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13768 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR && 13769 "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); 13770 # endif 13771 13772 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 13773 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 13774 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 13775 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 13776 13777 return externalFenceProperties; 13778 } 13779 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13780 13781 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 13782 //=== VK_KHR_external_fence_win32 === 13783 13784 template <typename Dispatch> importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,Dispatch const & d) const13785 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( 13786 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13787 { 13788 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13789 return static_cast<Result>( 13790 d.vkImportFenceWin32HandleKHR( m_device, 
reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); 13791 } 13792 13793 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13794 template <typename Dispatch> 13795 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,Dispatch const & d) const13796 Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const 13797 { 13798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13799 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13800 VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 13801 # endif 13802 13803 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13804 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); 13805 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); 13806 13807 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13808 } 13809 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13810 13811 template <typename Dispatch> getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const13812 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, 13813 HANDLE * pHandle, 13814 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13815 { 13816 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13817 return static_cast<Result>( 13818 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const 
VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 13819 } 13820 13821 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13822 template <typename Dispatch> 13823 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const13824 Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 13825 { 13826 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13827 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13828 VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); 13829 # endif 13830 13831 HANDLE handle; 13832 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13833 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 13834 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); 13835 13836 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); 13837 } 13838 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13839 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 13840 13841 //=== VK_KHR_external_fence_fd === 13842 13843 template <typename Dispatch> importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,Dispatch const & d) const13844 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, 13845 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13846 { 13847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13848 return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) 
) ); 13849 } 13850 13851 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13852 template <typename Dispatch> 13853 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo,Dispatch const & d) const13854 Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const 13855 { 13856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13857 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13858 VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); 13859 # endif 13860 13861 VULKAN_HPP_NAMESPACE::Result result = 13862 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) ); 13863 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); 13864 13865 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 13866 } 13867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13868 13869 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const13870 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, 13871 int * pFd, 13872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13873 { 13874 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13875 return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 13876 } 13877 13878 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13879 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,Dispatch const & d) const13880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, 13881 Dispatch const & d ) const 13882 { 13883 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13884 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13885 VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); 13886 # endif 13887 13888 int fd; 13889 VULKAN_HPP_NAMESPACE::Result result = 13890 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 13891 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); 13892 13893 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); 13894 } 13895 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13896 13897 //=== VK_KHR_performance_query === 13898 13899 template <typename Dispatch> 13900 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,uint32_t * pCounterCount,VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,Dispatch const & d) const13901 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 13902 uint32_t * pCounterCount, 13903 VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, 13904 VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, 13905 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13906 { 13907 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13908 return static_cast<Result>( 13909 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, 13910 queueFamilyIndex, 13911 pCounterCount, 13912 reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), 13913 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( 
pCounterDescriptions ) ) ); 13914 } 13915 13916 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13917 template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch> 13918 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13919 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 13920 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,Dispatch const & d) const13921 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const 13922 { 13923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13924 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13925 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && 13926 "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); 13927 # endif 13928 13929 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 13930 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 13931 data_; 13932 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 13933 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 13934 uint32_t counterCount; 13935 VULKAN_HPP_NAMESPACE::Result result; 13936 do 13937 { 13938 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13939 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 13940 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount 
) 13941 { 13942 counters.resize( counterCount ); 13943 counterDescriptions.resize( counterCount ); 13944 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 13945 m_physicalDevice, 13946 queueFamilyIndex, 13947 &counterCount, 13948 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 13949 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 13950 } 13951 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13952 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 13953 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 13954 if ( counterCount < counters.size() ) 13955 { 13956 counters.resize( counterCount ); 13957 counterDescriptions.resize( counterCount ); 13958 } 13959 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 13960 } 13961 13962 template <typename PerformanceCounterKHRAllocator, 13963 typename PerformanceCounterDescriptionKHRAllocator, 13964 typename Dispatch, 13965 typename std::enable_if< 13966 std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value && 13967 std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, 13968 int>::type> 13969 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13970 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 13971 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,PerformanceCounterDescriptionKHRAllocator & 
performanceCounterDescriptionKHRAllocator,Dispatch const & d) const13972 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 13973 PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, 13974 PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, 13975 Dispatch const & d ) const 13976 { 13977 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13978 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 13979 VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && 13980 "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); 13981 # endif 13982 13983 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 13984 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 13985 data_( 13986 std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); 13987 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 13988 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 13989 uint32_t counterCount; 13990 VULKAN_HPP_NAMESPACE::Result result; 13991 do 13992 { 13993 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13994 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 13995 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) 13996 { 13997 counters.resize( counterCount ); 13998 counterDescriptions.resize( counterCount ); 13999 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 14000 m_physicalDevice, 14001 queueFamilyIndex, 14002 &counterCount, 14003 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 14004 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 14005 } 14006 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 14007 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 14008 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 14009 if ( counterCount < counters.size() ) 14010 { 14011 counters.resize( counterCount ); 14012 counterDescriptions.resize( counterCount ); 14013 } 14014 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 14015 } 14016 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14017 14018 template <typename Dispatch> 14019 VULKAN_HPP_INLINE void getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,uint32_t * pNumPasses,Dispatch const & d) const14020 PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, 14021 uint32_t * pNumPasses, 14022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14023 { 14024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14025 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 14026 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses ); 14027 } 14028 14029 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14030 template <typename Dispatch> getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,Dispatch const & d) const14031 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t 
PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( 14032 const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14033 { 14034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14035 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14036 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && 14037 "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" ); 14038 # endif 14039 14040 uint32_t numPasses; 14041 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 14042 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses ); 14043 14044 return numPasses; 14045 } 14046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14047 14048 template <typename Dispatch> acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,Dispatch const & d) const14049 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, 14050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14051 { 14052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14053 return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); 14054 } 14055 14056 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14057 template <typename Dispatch> 14058 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info,Dispatch const & d) const14059 Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const 14060 { 14061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14062 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14063 VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" ); 14064 # endif 14065 14066 VULKAN_HPP_NAMESPACE::Result result = 14067 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) ); 14068 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); 14069 14070 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14071 } 14072 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14073 14074 template <typename Dispatch> releaseProfilingLockKHR(Dispatch const & d) const14075 VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14076 { 14077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14078 d.vkReleaseProfilingLockKHR( m_device ); 14079 } 14080 14081 //=== VK_KHR_get_surface_capabilities2 === 14082 14083 template <typename Dispatch> 14084 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,Dispatch const & d) const14085 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 14086 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, 14087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14088 { 14089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14090 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 14091 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 14092 reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); 14093 } 14094 14095 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14096 
  // Enhanced overload: returns the capabilities by value (wrapped per ResultValueType for the
  // build's error-handling mode) instead of through an out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatch loader the function pointer may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) );
  }

  // StructureChain overload: the caller's chain supplies extension structs hooked onto
  // SurfaceCapabilities2KHR::pNext; the query fills the whole chain in place.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    StructureChain<X, Y, Z...> structureChain;
    // Write directly into the chain's head element so its pre-linked pNext chain is preserved.
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    VULKAN_HPP_NAMESPACE::Result                    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: classic two-call enumeration interface — pass pSurfaceFormats == nullptr to
  // query the count, then call again with a buffer of that size. Returns the raw Result; noexcept.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR *
pSurfaceInfo,
                                                                                       uint32_t *                                pSurfaceFormatCount,
                                                                                       VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
                                                                                       Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                                         reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                         pSurfaceFormatCount,
                                                                         reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: performs the full count/fetch enumeration and returns the formats as a
  // vector. The do/while retries on eIncomplete because the set may change between the two calls.
  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t                                                                         surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    do
    {
      // First call: query the number of formats (pSurfaceFormats == nullptr).
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        // Second call: fetch the actual data.
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    // Shrink if the implementation wrote fewer entries than were allocated.
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
  }

  // Same as above, but the result vector is constructed with a caller-provided allocator.
  template <typename SurfaceFormat2KHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           SurfaceFormat2KHRAllocator &                                surfaceFormat2KHRAllocator,
                                           Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
    uint32_t                                                                         surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                                                     result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) );
  }

  // StructureChain overload: returns one StructureChain per format. A parallel plain
  // SurfaceFormat2KHR vector is what the C call actually fills; each element's pNext is pointed at
  // the matching chain's extension structs before the call, and the head structs are copied back
  // into the chains afterwards.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    std::vector<StructureChain, StructureChainAllocator> structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t                                             surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                         result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        // Hook each temporary head struct into its chain's extension structs.
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    // Copy the filled head structs back into their chains.
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) );
  }

  // StructureChain overload with a caller-provided allocator for the chain vector; otherwise
  // identical to the allocator-less StructureChain enumeration above.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           StructureChainAllocator &                                   structureChainAllocator,
                                           Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
                       "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
# endif

    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t                                             surfaceFormatCount;
    VULKAN_HPP_NAMESPACE::Result                         result;
    do
    {
      // Count query; retried via the surrounding do/while if the list changes (eIncomplete).
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        // Link each temporary head struct to its chain's pNext extension structs.
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext =
structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                   &surfaceFormatCount,
                                                   reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    // Copy the filled head structs back into their chains.
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_get_display_properties2 ===

  // C-style overload: two-call enumeration of display properties; returns the raw Result, noexcept.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t *                                    pPropertyCount,
                                                                                          VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count/fetch loop returning the properties as a vector; retries on eIncomplete.
  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
    uint32_t                                                                                 propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                             result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Same as above with a caller-provided allocator for the result vector.
  template <
    typename DisplayProperties2KHRAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
    uint32_t                                                                                 propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                             result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: two-call enumeration of display-plane properties; raw Result, noexcept.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
                                                                                               VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
                                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count/fetch loop returning the plane properties as a vector; retries on eIncomplete.
  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
    uint32_t                                                                                           propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                       result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Same as above with a caller-provided allocator for the result vector.
  template <
    typename DisplayPlaneProperties2KHRAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
                       "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
    uint32_t                                                                                           propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                       result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: two-call enumeration of mode properties for one display; raw Result, noexcept.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                                                              uint32_t *                       pPropertyCount,
                                                                                              VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
                                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
      m_physicalDevice,
static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count/fetch loop returning the display's mode properties as a vector;
  // retries on eIncomplete because the mode list may change between the two calls.
  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
    uint32_t                                                                                         propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                     result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Same as above with a caller-provided allocator for the result vector.
  template <
    typename DisplayModeProperties2KHRAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR     display,
                                                  DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
                                                  Dispatch const &                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
    uint32_t                                                                                         propertyCount;
    VULKAN_HPP_NAMESPACE::Result                                                                     result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: writes plane capabilities through pCapabilities and returns the raw Result; noexcept.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *   pDisplayPlaneInfo,
                                                     VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
                                                     Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                                                     reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
                                                                     reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the plane capabilities by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
    PhysicalDevice::getDisplayPlaneCapabilities2KHR(
const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const 14592 { 14593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14594 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14595 VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" ); 14596 # endif 14597 14598 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; 14599 VULKAN_HPP_NAMESPACE::Result result = 14600 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 14601 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), 14602 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) ); 14603 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); 14604 14605 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); 14606 } 14607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14608 14609 #if defined( VK_USE_PLATFORM_IOS_MVK ) 14610 //=== VK_MVK_ios_surface === 14611 14612 template <typename Dispatch> createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const14613 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, 14614 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14615 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 14616 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14617 { 14618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14619 return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, 14620 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), 14621 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14622 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 14623 } 14624 14625 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14626 template <typename Dispatch> 14627 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14628 Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 14629 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14630 Dispatch const & d ) const 14631 { 14632 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14633 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14634 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 14635 # endif 14636 14637 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 14638 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14639 d.vkCreateIOSSurfaceMVK( m_instance, 14640 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 14641 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14642 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 14643 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); 14644 14645 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 14646 } 14647 14648 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14649 template <typename Dispatch> 14650 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14651 Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, 14652 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14653 Dispatch const & d ) const 14654 { 14655 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14656 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14657 VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); 14658 # endif 14659 14660 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 14661 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14662 d.vkCreateIOSSurfaceMVK( m_instance, 14663 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), 14664 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14665 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 14666 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); 14667 14668 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14669 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 14670 } 14671 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14672 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14673 #endif /*VK_USE_PLATFORM_IOS_MVK*/ 14674 14675 #if defined( VK_USE_PLATFORM_MACOS_MVK ) 14676 //=== VK_MVK_macos_surface === 14677 14678 template <typename Dispatch> createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const14679 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * 
pCreateInfo, 14680 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14681 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 14682 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14683 { 14684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14685 return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, 14686 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), 14687 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14688 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 14689 } 14690 14691 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14692 template <typename Dispatch> 14693 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14694 Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 14695 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14696 Dispatch const & d ) const 14697 { 14698 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14699 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14700 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 14701 # endif 14702 14703 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 14704 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14705 d.vkCreateMacOSSurfaceMVK( m_instance, 14706 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 14707 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14708 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 14709 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); 14710 
14711 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 14712 } 14713 14714 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14715 template <typename Dispatch> 14716 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique(const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14717 Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, 14718 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14719 Dispatch const & d ) const 14720 { 14721 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14722 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14723 VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); 14724 # endif 14725 14726 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 14727 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14728 d.vkCreateMacOSSurfaceMVK( m_instance, 14729 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), 14730 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14731 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 14732 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); 14733 14734 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14735 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 14736 } 14737 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14738 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14739 #endif /*VK_USE_PLATFORM_MACOS_MVK*/ 14740 14741 //=== VK_EXT_debug_utils === 
14742 14743 template <typename Dispatch> setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,Dispatch const & d) const14744 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, 14745 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14746 { 14747 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14748 return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) ); 14749 } 14750 14751 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14752 template <typename Dispatch> 14753 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo,Dispatch const & d) const14754 Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 14755 { 14756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14757 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14758 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" ); 14759 # endif 14760 14761 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14762 d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) ); 14763 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); 14764 14765 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14766 } 14767 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14768 14769 template <typename Dispatch> setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,Dispatch const & d) const14770 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, 14771 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14772 { 14773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14774 return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) ); 14775 } 14776 14777 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14778 template <typename Dispatch> 14779 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo,Dispatch const & d) const14780 Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 14781 { 14782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14783 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14784 VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" ); 14785 # endif 14786 14787 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14788 d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) ); 14789 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); 14790 14791 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 14792 } 14793 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14794 14795 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const14796 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 14797 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14798 { 14799 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14800 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 14801 } 14802 14803 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14804 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const14805 VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 14806 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14807 { 14808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14809 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14810 VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 14811 # endif 14812 14813 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 14814 } 14815 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14816 14817 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const14818 VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14819 { 14820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14821 d.vkQueueEndDebugUtilsLabelEXT( m_queue ); 14822 } 14823 14824 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const14825 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 14826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14827 { 14828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14829 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 14830 } 14831 14832 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14833 template <typename Dispatch> insertDebugUtilsLabelEXT(const 
VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const14834 VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 14835 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14836 { 14837 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14838 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14839 VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 14840 # endif 14841 14842 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 14843 } 14844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14845 14846 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const14847 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 14848 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14849 { 14850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14851 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 14852 } 14853 14854 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14855 template <typename Dispatch> beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const14856 VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 14857 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14858 { 14859 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14860 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14861 VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 14862 # endif 14863 14864 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, 
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 14865 } 14866 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14867 14868 template <typename Dispatch> endDebugUtilsLabelEXT(Dispatch const & d) const14869 VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14870 { 14871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14872 d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); 14873 } 14874 14875 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,Dispatch const & d) const14876 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, 14877 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14878 { 14879 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14880 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); 14881 } 14882 14883 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14884 template <typename Dispatch> insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,Dispatch const & d) const14885 VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, 14886 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14887 { 14888 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14889 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14890 VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); 14891 # endif 14892 14893 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); 14894 } 14895 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14896 14897 template <typename Dispatch> 14898 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugUtilsMessengerEXT(const 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,Dispatch const & d) const14899 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, 14900 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14901 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, 14902 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14903 { 14904 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14905 return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, 14906 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), 14907 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14908 reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) ); 14909 } 14910 14911 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14912 template <typename Dispatch> 14913 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14914 Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 14915 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14916 Dispatch const & d ) const 14917 { 14918 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14919 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14920 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 14921 # endif 14922 14923 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 14924 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateDebugUtilsMessengerEXT( 14925 m_instance, 14926 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 14927 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14928 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 14929 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); 14930 14931 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); 14932 } 14933 14934 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14935 template <typename Dispatch> 14936 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14937 Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, 14938 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14939 Dispatch const & d ) const 14940 { 14941 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14942 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14943 VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 14944 # endif 14945 14946 VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; 14947 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT( 14948 m_instance, 14949 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), 14950 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14951 
reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); 14952 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); 14953 14954 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 14955 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 14956 } 14957 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14958 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14959 14960 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const14961 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 14962 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14963 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14964 { 14965 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14966 d.vkDestroyDebugUtilsMessengerEXT( 14967 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 14968 } 14969 14970 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14971 template <typename Dispatch> destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14972 VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 14973 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14974 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14975 { 14976 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14977 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 14978 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function 
<vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 14979 # endif 14980 14981 d.vkDestroyDebugUtilsMessengerEXT( 14982 m_instance, 14983 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 14984 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 14985 } 14986 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14987 14988 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const14989 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 14990 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14991 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14992 { 14993 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14994 d.vkDestroyDebugUtilsMessengerEXT( 14995 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 14996 } 14997 14998 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14999 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15000 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, 15001 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15002 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15003 { 15004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15006 VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); 15007 # endif 15008 15009 d.vkDestroyDebugUtilsMessengerEXT( 15010 m_instance, 15011 static_cast<VkDebugUtilsMessengerEXT>( messenger ), 15012 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15013 } 15014 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15015 15016 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,Dispatch const & d) const15017 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 15018 VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 15019 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, 15020 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15021 { 15022 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15023 d.vkSubmitDebugUtilsMessageEXT( m_instance, 15024 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 15025 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 15026 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) ); 15027 } 15028 15029 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15030 template <typename Dispatch> submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,Dispatch const & d) const15031 VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, 15032 VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, 15033 const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, 15034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15035 { 15036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
15037 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15038 VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" ); 15039 # endif 15040 15041 d.vkSubmitDebugUtilsMessageEXT( m_instance, 15042 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), 15043 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), 15044 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) ); 15045 } 15046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15047 15048 #if defined( VK_USE_PLATFORM_ANDROID_KHR ) 15049 //=== VK_ANDROID_external_memory_android_hardware_buffer === 15050 15051 template <typename Dispatch> 15052 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer * buffer,VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,Dispatch const & d) const15053 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, 15054 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, 15055 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15056 { 15057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15058 return static_cast<Result>( 15059 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) ); 15060 } 15061 15062 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15063 template <typename Dispatch> 15064 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const15065 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 15066 { 15067 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15068 # if 
( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15069 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 15070 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 15071 # endif 15072 15073 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; 15074 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15075 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 15076 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 15077 15078 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 15079 } 15080 15081 template <typename X, typename Y, typename... Z, typename Dispatch> 15082 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer & buffer,Dispatch const & d) const15083 Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const 15084 { 15085 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15086 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15087 VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && 15088 "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); 15089 # endif 15090 15091 StructureChain<X, Y, Z...> structureChain; 15092 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = 15093 structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>(); 15094 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15095 
d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); 15096 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); 15097 15098 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 15099 } 15100 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15101 15102 template <typename Dispatch> 15103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer,Dispatch const & d) const15104 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, 15105 struct AHardwareBuffer ** pBuffer, 15106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15107 { 15108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15109 return static_cast<Result>( 15110 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) ); 15111 } 15112 15113 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15114 template <typename Dispatch> 15115 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID(const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info,Dispatch const & d) const15116 Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const 15117 { 15118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15120 VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && 15121 "Function <vkGetMemoryAndroidHardwareBufferANDROID> requires 
<VK_ANDROID_external_memory_android_hardware_buffer>" ); 15122 # endif 15123 15124 struct AHardwareBuffer * buffer; 15125 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15126 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) ); 15127 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); 15128 15129 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); 15130 } 15131 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15132 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 15133 15134 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 15135 //=== VK_AMDX_shader_enqueue === 15136 15137 template <typename Dispatch> 15138 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const15139 Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15140 uint32_t createInfoCount, 15141 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, 15142 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15143 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 15144 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15145 { 15146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15147 return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, 15148 static_cast<VkPipelineCache>( pipelineCache ), 15149 createInfoCount, 15150 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ), 15151 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15152 
reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 15153 } 15154 15155 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15156 template <typename PipelineAllocator, typename Dispatch> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15157 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 15158 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15159 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15160 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15161 Dispatch const & d ) const 15162 { 15163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15164 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15165 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15166 # endif 15167 15168 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 15169 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15170 m_device, 15171 static_cast<VkPipelineCache>( pipelineCache ), 15172 createInfos.size(), 15173 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15174 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15175 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15176 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15177 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 15178 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15179 15180 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 15181 } 15182 15183 template <typename PipelineAllocator, 15184 typename Dispatch, 15185 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> createExecutionGraphPipelinesAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const15186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX( 15187 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15188 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15189 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15190 PipelineAllocator & pipelineAllocator, 15191 Dispatch const & d ) const 15192 { 15193 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15194 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15195 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15196 # endif 15197 15198 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 15199 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15200 m_device, 15201 static_cast<VkPipelineCache>( pipelineCache ), 15202 createInfos.size(), 15203 reinterpret_cast<const 
VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15204 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15205 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15206 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15207 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", 15208 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15209 15210 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 15211 } 15212 15213 template <typename Dispatch> 15214 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createExecutionGraphPipelineAMDX(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15215 Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15216 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 15217 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15218 Dispatch const & d ) const 15219 { 15220 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15221 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15222 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15223 # endif 15224 15225 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 15226 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15227 m_device, 15228 static_cast<VkPipelineCache>( pipelineCache ), 15229 1, 15230 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 15231 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15232 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 15233 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15234 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", 15235 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15236 15237 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 15238 } 15239 15240 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15241 template <typename Dispatch, typename PipelineAllocator> 15242 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15243 Device::createExecutionGraphPipelinesAMDXUnique( 15244 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15245 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15246 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15247 Dispatch const & d ) const 15248 { 15249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15250 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15251 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15252 # endif 15253 15254 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 15255 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15256 m_device, 15257 static_cast<VkPipelineCache>( pipelineCache 
), 15258 createInfos.size(), 15259 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15260 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15261 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15262 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15263 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 15264 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15265 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 15266 uniquePipelines.reserve( createInfos.size() ); 15267 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 15268 for ( auto const & pipeline : pipelines ) 15269 { 15270 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 15271 } 15272 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 15273 } 15274 15275 template < 15276 typename Dispatch, 15277 typename PipelineAllocator, 15278 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 15279 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const15280 Device::createExecutionGraphPipelinesAMDXUnique( 15281 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15282 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 15283 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15284 PipelineAllocator & pipelineAllocator, 15285 Dispatch const & d ) const 15286 { 15287 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15288 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15289 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15290 # endif 15291 15292 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 15293 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15294 m_device, 15295 static_cast<VkPipelineCache>( pipelineCache ), 15296 createInfos.size(), 15297 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 15298 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15299 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 15300 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15301 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 15302 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15303 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 15304 uniquePipelines.reserve( createInfos.size() ); 15305 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 15306 for ( auto const & pipeline : pipelines ) 15307 { 15308 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 15309 } 15310 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 15311 } 
15312 15313 template <typename Dispatch> 15314 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15315 Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 15316 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 15317 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15318 Dispatch const & d ) const 15319 { 15320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15321 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15322 VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); 15323 # endif 15324 15325 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 15326 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 15327 m_device, 15328 static_cast<VkPipelineCache>( pipelineCache ), 15329 1, 15330 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 15331 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15332 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 15333 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 15334 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", 15335 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 15336 15337 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 15338 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, 
Dispatch>( *this, allocator, d ) ) ); 15339 } 15340 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15341 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15342 15343 template <typename Dispatch> 15344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,Dispatch const & d) const15345 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 15346 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, 15347 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15348 { 15349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15350 return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 15351 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) ); 15352 } 15353 15354 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15355 template <typename Dispatch> 15356 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,Dispatch const & d) const15357 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const 15358 { 15359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15360 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15361 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && 15362 "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" ); 15363 # endif 15364 15365 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; 15366 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 15367 m_device, 
static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) ); 15368 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); 15369 15370 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); 15371 } 15372 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15373 15374 template <typename Dispatch> 15375 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,uint32_t * pNodeIndex,Dispatch const & d) const15376 Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 15377 const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, 15378 uint32_t * pNodeIndex, 15379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15380 { 15381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15382 return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 15383 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) ); 15384 } 15385 15386 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15387 template <typename Dispatch> getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,Dispatch const & d) const15388 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX( 15389 VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const 15390 { 15391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15392 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15393 VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" ); 15394 # endif 15395 15396 uint32_t nodeIndex; 15397 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 15398 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) ); 15399 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); 15400 15401 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); 15402 } 15403 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15404 15405 template <typename Dispatch> initializeGraphScratchMemoryAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,Dispatch const & d) const15406 VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15408 { 15409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15410 d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) ); 15411 } 15412 15413 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const15414 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15415 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 15416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15417 { 15418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15419 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const 
VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 15420 } 15421 15422 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15423 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const15424 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15425 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 15426 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15427 { 15428 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15429 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15430 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" ); 15431 # endif 15432 15433 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 15434 } 15435 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15436 15437 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const15438 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15439 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 15440 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15441 { 15442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15443 d.vkCmdDispatchGraphIndirectAMDX( 15444 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 15445 } 15446 15447 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15448 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) 
const15449 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15450 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 15451 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15452 { 15453 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15454 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15455 VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" ); 15456 # endif 15457 15458 d.vkCmdDispatchGraphIndirectAMDX( 15459 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 15460 } 15461 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15462 15463 template <typename Dispatch> dispatchGraphIndirectCountAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,Dispatch const & d) const15464 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 15465 VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, 15466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15467 { 15468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15469 d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) ); 15470 } 15471 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 15472 15473 //=== VK_EXT_sample_locations === 15474 15475 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,Dispatch const & d) const15476 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, 15477 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15478 { 15479 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15480 d.vkCmdSetSampleLocationsEXT( 
m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) ); 15481 } 15482 15483 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15484 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,Dispatch const & d) const15485 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, 15486 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15487 { 15488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15489 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15490 VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" ); 15491 # endif 15492 15493 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) ); 15494 } 15495 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15496 15497 template <typename Dispatch> getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,Dispatch const & d) const15498 VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 15499 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, 15500 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15501 { 15502 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15503 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 15504 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) ); 15505 } 15506 15507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15508 template <typename Dispatch> 15509 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits 
samples,Dispatch const & d) const15510 PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15511 { 15512 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15514 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT && 15515 "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" ); 15516 # endif 15517 15518 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; 15519 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 15520 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) ); 15521 15522 return multisampleProperties; 15523 } 15524 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15525 15526 //=== VK_KHR_get_memory_requirements2 === 15527 15528 template <typename Dispatch> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const15529 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 15530 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 15531 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15532 { 15533 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15534 d.vkGetImageMemoryRequirements2KHR( 15535 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 15536 } 15537 15538 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15539 template <typename Dispatch> 15540 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) 
const15541 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15542 { 15543 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15544 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15545 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 15546 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15547 # endif 15548 15549 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 15550 d.vkGetImageMemoryRequirements2KHR( 15551 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 15552 15553 return memoryRequirements; 15554 } 15555 15556 template <typename X, typename Y, typename... Z, typename Dispatch> 15557 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const15558 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15559 { 15560 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15561 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15562 VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && 15563 "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15564 # endif 15565 15566 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 15567 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 15568 d.vkGetImageMemoryRequirements2KHR( 15569 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements 
) ); 15570 15571 return structureChain; 15572 } 15573 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15574 15575 template <typename Dispatch> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const15576 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 15577 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 15578 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15579 { 15580 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15581 d.vkGetBufferMemoryRequirements2KHR( 15582 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 15583 } 15584 15585 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15586 template <typename Dispatch> 15587 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const15588 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15589 { 15590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15591 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15592 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 15593 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15594 # endif 15595 15596 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 15597 d.vkGetBufferMemoryRequirements2KHR( 15598 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 15599 15600 return memoryRequirements; 15601 } 15602 15603 template 
<typename X, typename Y, typename... Z, typename Dispatch> 15604 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const15605 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15606 { 15607 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15608 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15609 VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && 15610 "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15611 # endif 15612 15613 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 15614 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 15615 d.vkGetBufferMemoryRequirements2KHR( 15616 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 15617 15618 return structureChain; 15619 } 15620 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15621 15622 template <typename Dispatch> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const15623 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 15624 uint32_t * pSparseMemoryRequirementCount, 15625 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 15626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15627 { 15628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15629 
d.vkGetImageSparseMemoryRequirements2KHR( m_device, 15630 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 15631 pSparseMemoryRequirementCount, 15632 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 15633 } 15634 15635 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15636 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 15637 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const15638 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 15639 { 15640 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15641 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15642 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 15643 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15644 # endif 15645 15646 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 15647 uint32_t sparseMemoryRequirementCount; 15648 d.vkGetImageSparseMemoryRequirements2KHR( 15649 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 15650 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 15651 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 15652 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 15653 &sparseMemoryRequirementCount, 15654 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 15655 15656 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 15657 if 
( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 15658 { 15659 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 15660 } 15661 return sparseMemoryRequirements; 15662 } 15663 15664 template <typename SparseImageMemoryRequirements2Allocator, 15665 typename Dispatch, 15666 typename std::enable_if< 15667 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 15668 int>::type> 15669 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const15670 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 15671 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 15672 Dispatch const & d ) const 15673 { 15674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15675 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15676 VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && 15677 "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); 15678 # endif 15679 15680 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 15681 sparseImageMemoryRequirements2Allocator ); 15682 uint32_t sparseMemoryRequirementCount; 15683 d.vkGetImageSparseMemoryRequirements2KHR( 15684 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 15685 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 15686 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 15687 
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 15688 &sparseMemoryRequirementCount, 15689 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 15690 15691 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 15692 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 15693 { 15694 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 15695 } 15696 return sparseMemoryRequirements; 15697 } 15698 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15699 15700 //=== VK_KHR_acceleration_structure === 15701 15702 template <typename Dispatch> 15703 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,Dispatch const & d) const15704 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, 15705 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15706 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, 15707 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15708 { 15709 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15710 return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, 15711 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), 15712 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15713 reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) ); 15714 } 15715 15716 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15717 template <typename Dispatch> 15718 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR(const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15719 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 15720 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15721 Dispatch const & d ) const 15722 { 15723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15724 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15725 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 15726 # endif 15727 15728 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 15729 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 15730 m_device, 15731 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 15732 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15733 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 15734 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); 15735 15736 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); 15737 } 15738 15739 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15740 template <typename Dispatch> 15741 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15742 Device::createAccelerationStructureKHRUnique( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 15743 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15744 Dispatch const & d ) const 15745 { 15746 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15747 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15748 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 15749 # endif 15750 15751 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 15752 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 15753 m_device, 15754 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 15755 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15756 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 15757 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); 15758 15759 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 15760 result, 15761 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 15762 } 15763 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15764 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15765 15766 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15767 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 15768 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15769 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15770 { 15771 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15772 d.vkDestroyAccelerationStructureKHR( 15773 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15774 } 15775 15776 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15777 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15778 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 15779 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15781 { 15782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15783 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15784 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 15785 # endif 15786 15787 d.vkDestroyAccelerationStructureKHR( 15788 m_device, 15789 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 15790 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15791 } 15792 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15793 15794 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15795 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 15796 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15797 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15798 { 15799 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15800 
d.vkDestroyAccelerationStructureKHR( 15801 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15802 } 15803 15804 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15805 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15806 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 15807 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15808 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15809 { 15810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15811 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15812 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 15813 # endif 15814 15815 d.vkDestroyAccelerationStructureKHR( 15816 m_device, 15817 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 15818 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15819 } 15820 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15821 15822 template <typename Dispatch> 15823 VULKAN_HPP_INLINE void buildAccelerationStructuresKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const15824 CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, 15825 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 15826 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 15827 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15828 { 15829 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15830 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 15831 infoCount, 15832 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 15833 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ); 15834 } 15835 15836 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15837 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const15838 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( 15839 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 15840 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 15841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 15842 { 15843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15844 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15845 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); 15846 # endif 15847 # ifdef VULKAN_HPP_NO_EXCEPTIONS 15848 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 15849 # else 15850 if ( infos.size() != pBuildRangeInfos.size() ) 15851 { 15852 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 15853 } 15854 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 15855 15856 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 15857 infos.size(), 15858 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() 
), 15859 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ); 15860 } 15861 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15862 15863 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,const uint32_t * pIndirectStrides,const uint32_t * const * ppMaxPrimitiveCounts,Dispatch const & d) const15864 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, 15865 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 15866 const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, 15867 const uint32_t * pIndirectStrides, 15868 const uint32_t * const * ppMaxPrimitiveCounts, 15869 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15870 { 15871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15872 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 15873 infoCount, 15874 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 15875 reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), 15876 pIndirectStrides, 15877 ppMaxPrimitiveCounts ); 15878 } 15879 15880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15881 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,Dispatch const & d) const15882 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( 15883 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 15884 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, 15885 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, 15886 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, 15887 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 15888 { 15889 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15890 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15891 VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR && 15892 "Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" ); 15893 # endif 15894 # ifdef VULKAN_HPP_NO_EXCEPTIONS 15895 VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); 15896 VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); 15897 VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); 15898 # else 15899 if ( infos.size() != indirectDeviceAddresses.size() ) 15900 { 15901 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); 15902 } 15903 if ( infos.size() != indirectStrides.size() ) 15904 { 15905 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); 15906 } 15907 if ( infos.size() != pMaxPrimitiveCounts.size() ) 15908 { 15909 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); 15910 } 15911 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 15912 15913 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 15914 infos.size(), 15915 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 15916 reinterpret_cast<const VkDeviceAddress 
*>( indirectDeviceAddresses.data() ), 15917 indirectStrides.data(), 15918 pMaxPrimitiveCounts.data() ); 15919 } 15920 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15921 15922 template <typename Dispatch> 15923 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const15924 Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 15925 uint32_t infoCount, 15926 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 15927 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 15928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15929 { 15930 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15931 return static_cast<Result>( 15932 d.vkBuildAccelerationStructuresKHR( m_device, 15933 static_cast<VkDeferredOperationKHR>( deferredOperation ), 15934 infoCount, 15935 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 15936 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) ); 15937 } 15938 15939 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15940 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const15941 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( 15942 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 15943 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 15944 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 15945 Dispatch const & d ) const 15946 { 15947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15948 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15949 VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); 15950 # endif 15951 # ifdef VULKAN_HPP_NO_EXCEPTIONS 15952 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 15953 # else 15954 if ( infos.size() != pBuildRangeInfos.size() ) 15955 { 15956 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 15957 } 15958 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 15959 15960 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15961 d.vkBuildAccelerationStructuresKHR( m_device, 15962 static_cast<VkDeferredOperationKHR>( deferredOperation ), 15963 infos.size(), 15964 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 15965 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) ); 15966 VULKAN_HPP_NAMESPACE::detail::resultCheck( 15967 result, 15968 VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", 15969 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 15970 15971 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 15972 } 15973 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15974 15975 template <typename Dispatch> 
copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const15976 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 15977 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 15978 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15979 { 15980 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15981 return static_cast<Result>( d.vkCopyAccelerationStructureKHR( 15982 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) ); 15983 } 15984 15985 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15986 template <typename Dispatch> 15987 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const15988 Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 15989 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 15990 Dispatch const & d ) const 15991 { 15992 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15993 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 15994 VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 15995 # endif 15996 15997 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureKHR( 15998 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) ); 15999 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16000 result, 16001 VULKAN_HPP_NAMESPACE_STRING 
/* NOTE(review): This file is the auto-generated Vulkan-Hpp header (see the top-of-file
   notice: "generated from the Khronos Vulkan XML API Registry").  This copy has been
   mangled by an extraction tool: the bare integers below (16002, 16010, ...) are the
   original source line numbers fused into the text, and the duplicated parameter-list
   fragments (e.g. "copyAccelerationStructureToMemoryKHR(...) const16010") are ctags-style
   signature echoes.  Do not hand-edit the code tokens here — regenerate the header from
   the registry instead.  Code below is left byte-identical; comments only are added. */
/* Segment: tail of Device::copyAccelerationStructureKHR (enhanced overload: result check
   against eSuccess / eOperationDeferredKHR / eOperationNotDeferredKHR), then
   Device::copyAccelerationStructureToMemoryKHR — pointer overload forwarding to
   d.vkCopyAccelerationStructureToMemoryKHR, and the start of the enhanced (reference)
   overload with its dynamic-dispatch assert. */
"::Device::copyAccelerationStructureKHR", 16002 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16003 16004 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16005 } 16006 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16007 16008 template <typename Dispatch> 16009 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const16010 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16011 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 16012 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16013 { 16014 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16015 return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( 16016 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) ); 16017 } 16018 16019 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16020 template <typename Dispatch> 16021 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const16022 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16023 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 16024 Dispatch const & d ) const 16025 { 16026 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16027 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16028 VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR &&
/* Segment: body of the enhanced Device::copyAccelerationStructureToMemoryKHR (result
   checked against the three deferred-operation success codes), then
   Device::copyMemoryToAccelerationStructureKHR — pointer overload, and the opening of
   its enhanced overload (signature cut off at the end of this mangled line). */
16029 "Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); 16030 # endif 16031 16032 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureToMemoryKHR( 16033 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) ); 16034 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16035 result, 16036 VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", 16037 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16038 16039 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16040 } 16041 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16042 16043 template <typename Dispatch> 16044 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16045 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16046 const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, 16047 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16048 { 16049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16050 return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( 16051 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) ); 16052 } 16053 16054 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16055 template <typename Dispatch> 16056 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const
/* Segment: enhanced Device::copyMemoryToAccelerationStructureKHR (reference overload,
   mirrors the to-memory variant above), then Device::writeAccelerationStructuresPropertiesKHR —
   pointer overload forwarding dataSize/pData/stride straight to
   d.vkWriteAccelerationStructuresPropertiesKHR (signature cut mid-parameter at line end). */
VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,Dispatch const & d) const16057 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16058 const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, 16059 Dispatch const & d ) const 16060 { 16061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16063 VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR && 16064 "Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16065 # endif 16066 16067 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToAccelerationStructureKHR( 16068 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) ); 16069 VULKAN_HPP_NAMESPACE::detail::resultCheck( 16070 result, 16071 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", 16072 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 16073 16074 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16075 } 16076 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16077 16078 template <typename Dispatch> 16079 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const16080 Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 16081 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 16082 VULKAN_HPP_NAMESPACE::QueryType queryType, 16083 size_t dataSize, 16084 void *
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: end of the pointer overload of Device::writeAccelerationStructuresPropertiesKHR,
   then the enhanced vector overload: asserts dataSize is a multiple of sizeof(DataType),
   sizes a std::vector<DataType> accordingly, and queries properties for all structures
   in the ArrayProxy (call cut mid-cast at line end). */
pData, 16085 size_t stride, 16086 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16087 { 16088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16089 return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16090 accelerationStructureCount, 16091 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 16092 static_cast<VkQueryType>( queryType ), 16093 dataSize, 16094 pData, 16095 stride ) ); 16096 } 16097 16098 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16099 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 16100 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const16101 Device::writeAccelerationStructuresPropertiesKHR( 16102 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16103 VULKAN_HPP_NAMESPACE::QueryType queryType, 16104 size_t dataSize, 16105 size_t stride, 16106 Dispatch const & d ) const 16107 { 16108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16109 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16110 VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && 16111 "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16112 # endif 16113 16114 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16115 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16116 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16117 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16118 accelerationStructures.size(), 16119 reinterpret_cast<const
/* Segment: completion of the vector overload (writes data.size()*sizeof(DataType) bytes
   into the vector, returns it via createResultValueType), then
   Device::writeAccelerationStructuresPropertyKHR — single-value variant writing
   sizeof(DataType) bytes into one DataType (resultCheck string cut at line end). */
VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16120 static_cast<VkQueryType>( queryType ), 16121 data.size() * sizeof( DataType ), 16122 reinterpret_cast<void *>( data.data() ), 16123 stride ) ); 16124 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); 16125 16126 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16127 } 16128 16129 template <typename DataType, typename Dispatch> writeAccelerationStructuresPropertyKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const16130 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR( 16131 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16132 VULKAN_HPP_NAMESPACE::QueryType queryType, 16133 size_t stride, 16134 Dispatch const & d ) const 16135 { 16136 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16137 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16138 VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && 16139 "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16140 # endif 16141 16142 DataType data; 16143 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16144 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 16145 accelerationStructures.size(), 16146 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16147 static_cast<VkQueryType>( queryType ), 16148 sizeof( DataType ), 16149 reinterpret_cast<void *>( &data ), 16150 stride ) ); 16151 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING
/* Segment: tail of writeAccelerationStructuresPropertyKHR, then the
   CommandBuffer::copyAccelerationStructureKHR pair (pointer + reference overloads, both
   thin forwards to d.vkCmdCopyAccelerationStructureKHR) and the opening of
   CommandBuffer::copyAccelerationStructureToMemoryKHR (cut at line end). */
"::Device::writeAccelerationStructuresPropertyKHR" ); 16152 16153 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16154 } 16155 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16156 16157 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16158 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 16159 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16160 { 16161 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16162 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ); 16163 } 16164 16165 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16166 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const16167 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 16168 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16169 { 16170 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16171 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16172 VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16173 # endif 16174 16175 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ); 16176 } 16177 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16178 16179 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const16180 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: CommandBuffer::copyAccelerationStructureToMemoryKHR (pointer + reference
   overloads, forwarding to d.vkCmdCopyAccelerationStructureToMemoryKHR) and the pointer
   overload of CommandBuffer::copyMemoryToAccelerationStructureKHR (cut at #ifndef). */
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 16181 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16182 { 16183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16184 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ); 16185 } 16186 16187 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16188 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const16189 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 16190 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16191 { 16192 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16193 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16194 VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR && 16195 "Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); 16196 # endif 16197 16198 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ); 16199 } 16200 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16201 16202 template <typename Dispatch> copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const16203 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, 16204 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16205 { 16206 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16207 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ); 16208 } 16209 16210 #ifndef
/* Segment: enhanced CommandBuffer::copyMemoryToAccelerationStructureKHR (reference
   overload), then Device::getAccelerationStructureAddressKHR — pointer overload returning
   a DeviceAddress from d.vkGetAccelerationStructureDeviceAddressKHR, and the opening of
   its enhanced overload (cut at line end). */
VULKAN_HPP_DISABLE_ENHANCED_MODE 16211 template <typename Dispatch> copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,Dispatch const & d) const16212 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, 16213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16214 { 16215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16216 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16217 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR && 16218 "Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); 16219 # endif 16220 16221 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ); 16222 } 16223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16224 16225 template <typename Dispatch> getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,Dispatch const & d) const16226 VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, 16227 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16228 { 16229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16230 return static_cast<DeviceAddress>( 16231 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) ); 16232 } 16233 16234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16235 template <typename Dispatch> 16236 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,Dispatch const & d) const16237 Device::getAccelerationStructureAddressKHR( const
/* Segment: enhanced Device::getAccelerationStructureAddressKHR (reference overload),
   then CommandBuffer::writeAccelerationStructuresPropertiesKHR — pointer overload
   writing query results into a QueryPool starting at firstQuery. */
VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, 16238 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16239 { 16240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16241 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16242 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && 16243 "Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" ); 16244 # endif 16245 16246 VkDeviceAddress result = 16247 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) ); 16248 16249 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 16250 } 16251 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16252 16253 template <typename Dispatch> 16254 VULKAN_HPP_INLINE void writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16255 CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 16256 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 16257 VULKAN_HPP_NAMESPACE::QueryType queryType, 16258 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16259 uint32_t firstQuery, 16260 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16261 { 16262 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16263 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, 16264 accelerationStructureCount, 16265 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 16266 static_cast<VkQueryType>( queryType ), 16267 static_cast<VkQueryPool>( queryPool ), 16268 firstQuery ); 16269 } 16270 16271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16272 template <typename Dispatch>
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: enhanced CommandBuffer::writeAccelerationStructuresPropertiesKHR (ArrayProxy
   overload) and the pointer overload of Device::getAccelerationStructureCompatibilityKHR
   (cut inside the first VULKAN_HPP_ASSERT at line end). */
writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16273 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( 16274 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 16275 VULKAN_HPP_NAMESPACE::QueryType queryType, 16276 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16277 uint32_t firstQuery, 16278 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16279 { 16280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16282 VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && 16283 "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); 16284 # endif 16285 16286 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, 16287 accelerationStructures.size(), 16288 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 16289 static_cast<VkQueryType>( queryType ), 16290 static_cast<VkQueryPool>( queryPool ), 16291 firstQuery ); 16292 } 16293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16294 16295 template <typename Dispatch> getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const16296 VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, 16297 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 16298 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16299 { 16300 VULKAN_HPP_ASSERT(
/* Segment: completion of the pointer overload plus the enhanced (by-value-return)
   overload of Device::getAccelerationStructureCompatibilityKHR, both forwarding to
   d.vkGetDeviceAccelerationStructureCompatibilityKHR; then the opening of the pointer
   overload of Device::getAccelerationStructureBuildSizesKHR (cut at line end). */
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16301 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, 16302 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), 16303 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); 16304 } 16305 16306 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16307 template <typename Dispatch> 16308 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,Dispatch const & d) const16309 Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, 16310 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16311 { 16312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16313 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16314 VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && 16315 "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" ); 16316 # endif 16317 16318 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 16319 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, 16320 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), 16321 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 16322 16323 return compatibility; 16324 } 16325 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16326 16327 template <typename Dispatch> getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,const uint32_t * pMaxPrimitiveCounts,VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,Dispatch const & d) const16328 VULKAN_HPP_INLINE void
/* Segment: Device::getAccelerationStructureBuildSizesKHR — pointer overload forwarding
   to d.vkGetAccelerationStructureBuildSizesKHR, then the enhanced ArrayProxy overload up
   to its "# ifdef" guard (the maxPrimitiveCounts/geometryCount consistency check follows
   on the next mangled line). */
Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 16329 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, 16330 const uint32_t * pMaxPrimitiveCounts, 16331 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, 16332 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16333 { 16334 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16335 d.vkGetAccelerationStructureBuildSizesKHR( m_device, 16336 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 16337 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), 16338 pMaxPrimitiveCounts, 16339 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); 16340 } 16341 16342 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16343 template <typename Dispatch> 16344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,Dispatch const & d) const16345 Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 16346 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, 16347 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts, 16348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 16349 { 16350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16351 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16352 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && 16353 "Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" ); 16354 # endif 16355 # ifdef
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: completion of enhanced getAccelerationStructureBuildSizesKHR — the
   maxPrimitiveCounts.size() == buildInfo.geometryCount check (assert under
   VULKAN_HPP_NO_EXCEPTIONS, LogicError otherwise); then the VK_KHR_ray_tracing_pipeline
   section header and the pointer overload of CommandBuffer::traceRaysKHR (cut inside
   its first assert at line end). */
VULKAN_HPP_NO_EXCEPTIONS 16356 VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); 16357 # else 16358 if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) 16359 { 16360 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); 16361 } 16362 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16363 16364 VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; 16365 d.vkGetAccelerationStructureBuildSizesKHR( m_device, 16366 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 16367 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), 16368 maxPrimitiveCounts.data(), 16369 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); 16370 16371 return sizeInfo; 16372 } 16373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16374 16375 //=== VK_KHR_ray_tracing_pipeline === 16376 16377 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const16378 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 16379 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 16380 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 16381 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 16382 uint32_t width, 16383 uint32_t height, 16384 uint32_t depth, 16385 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16386 { 16387 VULKAN_HPP_ASSERT(
/* Segment: body of the pointer overload of CommandBuffer::traceRaysKHR (forwards the
   four shader-binding-table regions plus width/height/depth to d.vkCmdTraceRaysKHR),
   then the enhanced reference overload up to its dispatch call (cut at line end). */
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16388 d.vkCmdTraceRaysKHR( m_commandBuffer, 16389 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 16390 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 16391 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 16392 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 16393 width, 16394 height, 16395 depth ); 16396 } 16397 16398 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16399 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const16400 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 16401 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, 16402 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, 16403 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, 16404 uint32_t width, 16405 uint32_t height, 16406 uint32_t depth, 16407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16408 { 16409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16410 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16411 VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16412 # endif 16413 16414 d.vkCmdTraceRaysKHR( m_commandBuffer, 16415 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 16416
/* Segment: tail of the enhanced traceRaysKHR overload, then
   Device::createRayTracingPipelinesKHR — raw pointer overload forwarding to
   d.vkCreateRayTracingPipelinesKHR (enhanced ArrayProxy overload opens at line end). */
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 16417 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 16418 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 16419 width, 16420 height, 16421 depth ); 16422 } 16423 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16424 16425 template <typename Dispatch> 16426 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const16427 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16428 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16429 uint32_t createInfoCount, 16430 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, 16431 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16432 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 16433 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16434 { 16435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16436 return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, 16437 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16438 static_cast<VkPipelineCache>( pipelineCache ), 16439 createInfoCount, 16440 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), 16441 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16442 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 16443 } 16444 16445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16446 template <typename PipelineAllocator, typename Dispatch> 16447 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: enhanced Device::createRayTracingPipelinesKHR (ArrayProxy overload) — sizes a
   Pipeline vector from createInfos, calls d.vkCreateRayTracingPipelinesKHR, and begins
   the resultCheck against the deferred-operation / pipeline-compile-required success set
   (cut mid-initializer-list at line end). */
ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16448 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16449 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16450 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 16451 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16452 Dispatch const & d ) const 16453 { 16454 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16455 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16456 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16457 # endif 16458 16459 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 16460 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16461 m_device, 16462 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16463 static_cast<VkPipelineCache>( pipelineCache ), 16464 createInfos.size(), 16465 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 16466 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16467 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16468 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16469 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 16470 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16471
/* Segment: completion of that overload (accepts eOperationDeferredKHR,
   eOperationNotDeferredKHR and ePipelineCompileRequiredEXT in addition to eSuccess),
   then the overload taking an explicit PipelineAllocator, constrained via enable_if on
   PipelineAllocator::value_type == Pipeline (body cut before the dispatch call). */
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16472 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16473 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16474 16475 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 16476 } 16477 16478 template <typename PipelineAllocator, 16479 typename Dispatch, 16480 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 16481 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const16482 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16483 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16484 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 16485 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16486 PipelineAllocator & pipelineAllocator, 16487 Dispatch const & d ) const 16488 { 16489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16490 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16491 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16492 # endif 16493 16494 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 16495 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
/* Segment: body of the PipelineAllocator overload (identical dispatch and result check),
   then Device::createRayTracingPipelineKHR — the single-pipeline convenience overload
   (opening only; the "1" createInfoCount and result handling are on the next line). */
d.vkCreateRayTracingPipelinesKHR( 16496 m_device, 16497 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16498 static_cast<VkPipelineCache>( pipelineCache ), 16499 createInfos.size(), 16500 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 16501 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16502 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16503 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16504 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 16505 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16506 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16507 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16508 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16509 16510 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); 16511 } 16512 16513 template <typename Dispatch> 16514 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16515 Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16516 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16517 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 16518 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16519 Dispatch const & d ) const 16520 { 16521 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16522 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16523 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function
/* NOTE(review): mangled rendering of generated Vulkan-Hpp code — bare integers are
   fused original line numbers, duplicated parameter lists are ctags signature echoes.
   Code tokens left byte-identical; comments only.
   Segment: body of Device::createRayTracingPipelineKHR (single pipeline, createInfoCount
   hard-coded to 1, returns ResultValue<Pipeline>), then — under VULKAN_HPP_NO_SMART_HANDLE —
   the opening of Device::createRayTracingPipelinesKHRUnique (cut mid-signature). */
<vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16524 # endif 16525 16526 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 16527 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16528 m_device, 16529 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16530 static_cast<VkPipelineCache>( pipelineCache ), 16531 1, 16532 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 16533 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16534 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 16535 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16536 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", 16537 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16538 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16539 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16540 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16541 16542 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) ); 16543 } 16544 16545 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16546 template <typename Dispatch, typename PipelineAllocator> 16547 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16548 Device::createRayTracingPipelinesKHRUnique( 16549 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16550 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16551 VULKAN_HPP_NAMESPACE::ArrayProxy<const
/* Segment: body of createRayTracingPipelinesKHRUnique — creates raw pipelines, checks the
   result, then wraps each in a UniqueHandle with an ObjectDestroy deleter built from
   (*this, allocator, d); return statement cut mid-type at line end. */
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 16552 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16553 Dispatch const & d ) const 16554 { 16555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16556 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16557 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16558 # endif 16559 16560 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 16561 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16562 m_device, 16563 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16564 static_cast<VkPipelineCache>( pipelineCache ), 16565 createInfos.size(), 16566 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 16567 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16568 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16569 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16570 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 16571 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16572 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16573 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16574 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16575 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 16576 uniquePipelines.reserve( createInfos.size() ); 16577 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 16578 for ( auto const & pipeline : pipelines ) 16579 { 16580 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 16581 } 16582 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>,
/* Segment: close of the first Unique overload, then the PipelineAllocator-constrained
   second overload of createRayTracingPipelinesKHRUnique — runs past the end of this
   chunk (its body is cut mid-expression), so only its visible prefix is kept here. */
PipelineAllocator>>( result, std::move( uniquePipelines ) ); 16583 } 16584 16585 template < 16586 typename Dispatch, 16587 typename PipelineAllocator, 16588 typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 16589 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const16590 Device::createRayTracingPipelinesKHRUnique( 16591 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16592 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16593 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 16594 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16595 PipelineAllocator & pipelineAllocator, 16596 Dispatch const & d ) const 16597 { 16598 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16599 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16600 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16601 # endif 16602 16603 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 16604 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16605 m_device, 16606 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16607 static_cast<VkPipelineCache>( pipelineCache ), 16608 createInfos.size(), 16609 reinterpret_cast<const
VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 16610 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16611 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 16612 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16613 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 16614 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16615 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16616 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16617 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16618 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 16619 uniquePipelines.reserve( createInfos.size() ); 16620 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 16621 for ( auto const & pipeline : pipelines ) 16622 { 16623 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 16624 } 16625 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); 16626 } 16627 16628 template <typename Dispatch> 16629 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16630 Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 16631 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 16632 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 16633 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16634 
Dispatch const & d ) const 16635 { 16636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16637 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16638 VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16639 # endif 16640 16641 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 16642 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR( 16643 m_device, 16644 static_cast<VkDeferredOperationKHR>( deferredOperation ), 16645 static_cast<VkPipelineCache>( pipelineCache ), 16646 1, 16647 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 16648 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16649 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 16650 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 16651 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", 16652 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 16653 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 16654 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 16655 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 16656 16657 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 16658 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 16659 } 16660 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 16661 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16662 16663 template <typename Dispatch> getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const16664 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 16665 uint32_t firstGroup, 
16666 uint32_t groupCount, 16667 size_t dataSize, 16668 void * pData, 16669 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16670 { 16671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16672 return static_cast<Result>( 16673 d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 16674 } 16675 16676 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16677 template <typename DataType, typename DataTypeAllocator, typename Dispatch> getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const16678 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR( 16679 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 16680 { 16681 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16682 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16683 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && 16684 "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 16685 # endif 16686 16687 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16688 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16689 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR( 16690 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 16691 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); 16692 16693 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16694 } 16695 16696 
template <typename DataType, typename Dispatch> 16697 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const16698 Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 16699 { 16700 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16701 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16702 VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && 16703 "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); 16704 # endif 16705 16706 DataType data; 16707 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR( 16708 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 16709 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); 16710 16711 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16712 } 16713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16714 16715 template <typename Dispatch> getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const16716 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 16717 uint32_t firstGroup, 16718 uint32_t groupCount, 16719 size_t dataSize, 16720 void * pData, 16721 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16722 { 16723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16724 return static_cast<Result>( 16725 
d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 16726 } 16727 16728 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16729 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 16730 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const16731 Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( 16732 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 16733 { 16734 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16735 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16736 VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && 16737 "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16738 # endif 16739 16740 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16741 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16742 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 16743 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 16744 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); 16745 16746 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16747 } 16748 16749 template <typename DataType, typename Dispatch> getRayTracingCaptureReplayShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t 
groupCount,Dispatch const & d) const16750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( 16751 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 16752 { 16753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16755 VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && 16756 "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16757 # endif 16758 16759 DataType data; 16760 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 16761 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 16762 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); 16763 16764 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 16765 } 16766 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16767 16768 template <typename Dispatch> traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const16769 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 16770 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 16771 const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 16772 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 16773 VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 16774 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16775 { 16776 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16777 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, 16778 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 16779 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 16780 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 16781 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 16782 static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 16783 } 16784 16785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16786 template <typename Dispatch> traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const16787 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 16788 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, 16789 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, 16790 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, 16791 VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 16792 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16793 { 16794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 16795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16796 VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" ); 16797 # endif 16798 16799 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, 16800 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 16801 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 16802 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 16803 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 16804 static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 16805 } 16806 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16807 16808 template <typename Dispatch> getRayTracingShaderGroupStackSizeKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t group,VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,Dispatch const & d) const16809 VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 16810 uint32_t group, 16811 VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, 16812 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16813 { 16814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16815 return static_cast<DeviceSize>( 16816 d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) ); 16817 } 16818 16819 template <typename Dispatch> setRayTracingPipelineStackSizeKHR(uint32_t pipelineStackSize,Dispatch const & d) const16820 VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16821 { 16822 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16823 d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); 16824 } 16825 
16826 //=== VK_KHR_sampler_ycbcr_conversion === 16827 16828 template <typename Dispatch> 16829 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,Dispatch const & d) const16830 Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, 16831 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16832 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, 16833 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16834 { 16835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16836 return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, 16837 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), 16838 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16839 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); 16840 } 16841 16842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16843 template <typename Dispatch> 16844 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16845 Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 16846 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16847 Dispatch const & d ) const 16848 { 16849 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16850 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16851 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && 16852 "Function <vkCreateSamplerYcbcrConversionKHR> 
requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 16853 # endif 16854 16855 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 16856 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 16857 m_device, 16858 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 16859 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16860 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 16861 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); 16862 16863 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); 16864 } 16865 16866 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16867 template <typename Dispatch> 16868 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16869 Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 16870 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16871 Dispatch const & d ) const 16872 { 16873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16874 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16875 VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && 16876 "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 16877 # endif 16878 16879 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 16880 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 16881 m_device, 16882 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 16883 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16884 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 16885 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); 16886 16887 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 16888 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 16889 } 16890 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 16891 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16892 16893 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16894 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 16895 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16896 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16897 { 16898 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16899 d.vkDestroySamplerYcbcrConversionKHR( 16900 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16901 } 16902 16903 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16904 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const16905 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 16906 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 16907 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16908 { 16909 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16910 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16911 VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR && 16912 "Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); 16913 # endif 16914 16915 d.vkDestroySamplerYcbcrConversionKHR( 16916 m_device, 16917 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 16918 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16919 } 16920 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16921 16922 //=== VK_KHR_bind_memory2 === 16923 16924 template <typename Dispatch> bindBufferMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const16925 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, 16926 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 16927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16928 { 16929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16930 return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 16931 } 16932 16933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16934 template <typename Dispatch> 16935 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const16936 Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & 
bindInfos, 16937 Dispatch const & d ) const 16938 { 16939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16940 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16941 VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 16942 # endif 16943 16944 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16945 d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 16946 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); 16947 16948 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 16949 } 16950 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16951 16952 template <typename Dispatch> bindImageMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const16953 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, 16954 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 16955 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16956 { 16957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16958 return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 16959 } 16960 16961 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16962 template <typename Dispatch> 16963 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const16964 Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 16965 { 16966 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16967 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16968 VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); 16969 # endif 16970 16971 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16972 d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 16973 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); 16974 16975 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 16976 } 16977 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16978 16979 //=== VK_EXT_image_drm_format_modifier === 16980 16981 template <typename Dispatch> getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,Dispatch const & d) const16982 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( 16983 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16984 { 16985 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16986 return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 16987 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); 16988 } 16989 16990 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16991 template <typename Dispatch> 16992 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const16993 Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 
16994 { 16995 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16996 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 16997 VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT && 16998 "Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" ); 16999 # endif 17000 17001 VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; 17002 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 17003 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) ); 17004 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); 17005 17006 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 17007 } 17008 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17009 17010 //=== VK_EXT_validation_cache === 17011 17012 template <typename Dispatch> createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,Dispatch const & d) const17013 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, 17014 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17015 VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, 17016 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17017 { 17018 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17019 return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, 17020 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), 17021 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17022 reinterpret_cast<VkValidationCacheEXT *>( 
pValidationCache ) ) ); 17023 } 17024 17025 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17026 template <typename Dispatch> 17027 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17028 Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 17029 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17030 Dispatch const & d ) const 17031 { 17032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17033 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17034 VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17035 # endif 17036 17037 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 17038 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 17039 m_device, 17040 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 17041 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17042 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 17043 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); 17044 17045 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( validationCache ) ); 17046 } 17047 17048 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17049 template <typename Dispatch> 17050 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & 
createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17051 Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 17052 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17053 Dispatch const & d ) const 17054 { 17055 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17056 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17057 VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17058 # endif 17059 17060 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 17061 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 17062 m_device, 17063 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 17064 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17065 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 17066 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); 17067 17068 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 17069 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17070 } 17071 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17072 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17073 17074 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17075 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17076 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17077 Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT 17078 { 17079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17080 d.vkDestroyValidationCacheEXT( 17081 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17082 } 17083 17084 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17085 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17086 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17087 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17089 { 17090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17091 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17092 VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17093 # endif 17094 17095 d.vkDestroyValidationCacheEXT( 17096 m_device, 17097 static_cast<VkValidationCacheEXT>( validationCache ), 17098 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17099 } 17100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17101 17102 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17103 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17104 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17105 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17106 { 17107 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17108 d.vkDestroyValidationCacheEXT( 17109 m_device, static_cast<VkValidationCacheEXT>( 
validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17110 } 17111 17112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17113 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17114 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17115 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17117 { 17118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17120 VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); 17121 # endif 17122 17123 d.vkDestroyValidationCacheEXT( 17124 m_device, 17125 static_cast<VkValidationCacheEXT>( validationCache ), 17126 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17127 } 17128 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17129 17130 template <typename Dispatch> mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,uint32_t srcCacheCount,const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,Dispatch const & d) const17131 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 17132 uint32_t srcCacheCount, 17133 const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, 17134 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17135 { 17136 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17137 return static_cast<Result>( d.vkMergeValidationCachesEXT( 17138 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); 17139 } 17140 17141 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 17142 template <typename Dispatch> 17143 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,Dispatch const & d) const17144 Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 17145 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, 17146 Dispatch const & d ) const 17147 { 17148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17149 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17150 VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" ); 17151 # endif 17152 17153 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergeValidationCachesEXT( 17154 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) ); 17155 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); 17156 17157 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 17158 } 17159 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17160 17161 template <typename Dispatch> getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,size_t * pDataSize,void * pData,Dispatch const & d) const17162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 17163 size_t * pDataSize, 17164 void * pData, 17165 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17166 { 17167 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17168 return static_cast<Result>( d.vkGetValidationCacheDataEXT( 
m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); 17169 } 17170 17171 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17172 template <typename Uint8_tAllocator, typename Dispatch> 17173 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Dispatch const & d) const17174 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const 17175 { 17176 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17177 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17178 VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); 17179 # endif 17180 17181 std::vector<uint8_t, Uint8_tAllocator> data; 17182 size_t dataSize; 17183 VULKAN_HPP_NAMESPACE::Result result; 17184 do 17185 { 17186 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17187 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 17188 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 17189 { 17190 data.resize( dataSize ); 17191 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17192 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 17193 } 17194 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17195 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 17196 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 17197 if ( dataSize < data.size() ) 17198 { 17199 data.resize( dataSize ); 17200 } 17201 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17202 } 17203 17204 template <typename Uint8_tAllocator, 17205 
typename Dispatch, 17206 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 17207 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const17208 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 17209 { 17210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17211 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17212 VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); 17213 # endif 17214 17215 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 17216 size_t dataSize; 17217 VULKAN_HPP_NAMESPACE::Result result; 17218 do 17219 { 17220 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17221 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 17222 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 17223 { 17224 data.resize( dataSize ); 17225 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17226 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 17227 } 17228 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17229 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 17230 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 17231 if ( dataSize < data.size() ) 17232 { 17233 data.resize( dataSize ); 17234 } 17235 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 17236 } 17237 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17238 17239 //=== VK_NV_shading_rate_image === 17240 17241 template <typename Dispatch> bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const17242 VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, 17243 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 17244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17245 { 17246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17247 d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 17248 } 17249 17250 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,Dispatch const & d) const17251 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, 17252 uint32_t viewportCount, 17253 const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, 17254 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17255 { 17256 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17257 d.vkCmdSetViewportShadingRatePaletteNV( 17258 m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); 17259 } 17260 17261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17262 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,Dispatch const & d) const17263 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( 17264 uint32_t firstViewport, 17265 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, 17266 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 17267 { 17268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17270 VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" ); 17271 # endif 17272 17273 d.vkCmdSetViewportShadingRatePaletteNV( 17274 m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) ); 17275 } 17276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17277 17278 template <typename Dispatch> setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,uint32_t customSampleOrderCount,const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,Dispatch const & d) const17279 VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 17280 uint32_t customSampleOrderCount, 17281 const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, 17282 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17283 { 17284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17285 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 17286 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 17287 customSampleOrderCount, 17288 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); 17289 } 17290 17291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17292 template <typename Dispatch> 17293 VULKAN_HPP_INLINE void setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,Dispatch const & d) const17294 CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 17295 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, 17296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17297 { 17298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17300 VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" ); 17301 # endif 17302 17303 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 17304 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 17305 customSampleOrders.size(), 17306 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) ); 17307 } 17308 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17309 17310 //=== VK_NV_ray_tracing === 17311 17312 template <typename Dispatch> 17313 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,Dispatch const & d) const17314 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, 17315 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17316 VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, 17317 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17318 { 17319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17320 return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, 17321 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), 17322 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17323 reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) ); 17324 } 17325 17326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17327 template <typename Dispatch> 17328 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17329 Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 17330 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17331 Dispatch const & d ) const 17332 { 17333 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17334 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17335 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17336 # endif 17337 17338 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 17339 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 17340 m_device, 17341 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 17342 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17343 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 17344 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); 17345 17346 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); 17347 } 17348 17349 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17350 template <typename Dispatch> 17351 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17352 
Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, 17353 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17354 Dispatch const & d ) const 17355 { 17356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17357 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17358 VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17359 # endif 17360 17361 VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; 17362 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV( 17363 m_device, 17364 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), 17365 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17366 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); 17367 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); 17368 17369 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 17370 result, 17371 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 17372 } 17373 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17374 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17375 17376 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17377 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17378 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17379 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 17380 { 17381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17382 d.vkDestroyAccelerationStructureNV( 17383 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17384 } 17385 17386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17387 template <typename Dispatch> destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17388 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17389 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17391 { 17392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17393 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17394 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17395 # endif 17396 17397 d.vkDestroyAccelerationStructureNV( 17398 m_device, 17399 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 17400 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17401 } 17402 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17403 17404 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const17405 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17406 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17408 { 17409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17410 
d.vkDestroyAccelerationStructureNV( 17411 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 17412 } 17413 17414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17415 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17416 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 17417 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17418 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17419 { 17420 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17421 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17422 VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); 17423 # endif 17424 17425 d.vkDestroyAccelerationStructureNV( 17426 m_device, 17427 static_cast<VkAccelerationStructureNV>( accelerationStructure ), 17428 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 17429 } 17430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17431 17432 template <typename Dispatch> 17433 VULKAN_HPP_INLINE void getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,Dispatch const & d) const17434 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, 17435 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, 17436 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17437 { 17438 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17439 
d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 17440 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), 17441 reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) ); 17442 } 17443 17444 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17445 template <typename Dispatch> 17446 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const17447 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 17448 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17449 { 17450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17451 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17452 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 17453 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 17454 # endif 17455 17456 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; 17457 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 17458 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 17459 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 17460 17461 return memoryRequirements; 17462 } 17463 17464 template <typename X, typename Y, typename... 
Z, typename Dispatch> 17465 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,Dispatch const & d) const17466 Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, 17467 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17468 { 17469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17470 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17471 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && 17472 "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); 17473 # endif 17474 17475 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 17476 VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>(); 17477 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, 17478 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), 17479 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); 17480 17481 return structureChain; 17482 } 17483 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17484 17485 template <typename Dispatch> bindAccelerationStructureMemoryNV(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,Dispatch const & d) const17486 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( 17487 uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17488 { 17489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17490 return static_cast<Result>( 17491 d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, 
reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); 17492 } 17493 17494 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17495 template <typename Dispatch> bindAccelerationStructureMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,Dispatch const & d) const17496 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( 17497 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const 17498 { 17499 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17500 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 17501 VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" ); 17502 # endif 17503 17504 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindAccelerationStructureMemoryNV( 17505 m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) ); 17506 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); 17507 17508 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 17509 } 17510 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17511 17512 template <typename Dispatch> buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,VULKAN_HPP_NAMESPACE::Buffer instanceData,VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,VULKAN_HPP_NAMESPACE::Bool32 update,VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,VULKAN_HPP_NAMESPACE::Buffer scratch,VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,Dispatch const & d) const17513 VULKAN_HPP_INLINE void 
CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                               VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                               VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                               VULKAN_HPP_NAMESPACE::Bool32                              update,
                                               VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                               VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                               VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                               VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                               Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    // Records a build (or, when 'update' is true, a refit of 'src' into 'dst') of an NV acceleration
    // structure; every handle is passed straight through to the C API.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: build info taken by reference; additionally validates the function pointer
  // when the dynamic dispatch loader is in use.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32                              update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
# endif

    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a copy of 'src' into 'dst' (clone or compaction, selected by 'mode').
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
                                                                     VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
                                                                     VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                                                     Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
                                        static_cast<VkAccelerationStructureNV>( dst ),
                                        static_cast<VkAccelerationStructureNV>( src ),
                                        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  }

  template <typename Dispatch>
// Records a vkCmdTraceRaysNV dispatch of width x height x depth rays; the raygen/miss/hit/callable
  // shader-binding-table regions are each described by a buffer handle plus offset (and stride).
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                                                     uint32_t                         width,
                                                     uint32_t                         height,
                                                     uint32_t                         depth,
                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysNV( m_commandBuffer,
                        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
                        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( missShaderBindingOffset ),
                        static_cast<VkDeviceSize>( missShaderBindingStride ),
                        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
                        static_cast<VkDeviceSize>( hitShaderBindingStride ),
                        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
                        static_cast<VkDeviceSize>( callableShaderBindingStride ),
                        width,
                        height,
                        depth );
  }

  // Thin, noexcept C-style wrapper around vkCreateRayTracingPipelinesNV; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                                                                     uint32_t                                                     createInfoCount,
                                                                                     const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
                                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *            pAllocator,
                                                                                     VULKAN_HPP_NAMESPACE::Pipeline *                             pPipelines,
                                                                                     Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
                                                                 static_cast<VkPipelineCache>( pipelineCache ),
                                                                 createInfoCount,
                                                                 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
                                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
                                         Dispatch const &                                                                           d ) const
  {
    // Enhanced overload: creates one pipeline per element of 'createInfos'.
    // Returns ResultValue (not ResultValueType) because ePipelineCompileRequiredEXT is a
    // success-like code the caller must be able to observe without an exception.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Same as above, but the result vector is constructed with the caller-supplied allocator instance;
  // enable_if restricts PipelineAllocator to allocators of Pipeline.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
                                         PipelineAllocator &                                                                        pipelineAllocator,
                                         Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
  }

  // Single-pipeline convenience: one-element vkCreateRayTracingPipelinesNV call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                        const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                        Dispatch const &                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( result, std::move( pipeline ) );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
                                               Dispatch const &                                                                           d ) const
  {
    // Smart-handle variant: each created Pipeline is wrapped in a UniqueHandle that destroys it
    // with the same device/allocator/dispatcher when the handle goes out of scope.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Same as above, but the UniqueHandle vector is constructed with the caller-supplied allocator;
  // enable_if restricts PipelineAllocator to allocators of UniqueHandle<Pipeline>.
  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
                                               PipelineAllocator &                                                                        pipelineAllocator,
                                               Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) );
  }

  // Single-pipeline smart-handle convenience.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                              Dispatch const &                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
# endif

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin, noexcept C-style wrapper around vkGetRayTracingShaderGroupHandlesNV.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                           uint32_t                       firstGroup,
                                                                                           uint32_t                       groupCount,
                                                                                           size_t                         dataSize,
                                                                                           void *                         pData,
                                                                                           Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>(
pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the handle bytes as a std::vector<DataType>; 'dataSize' must be a
  // multiple of sizeof( DataType ) (asserted below).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV &&
                       "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
# endif

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Convenience overload fetching exactly one DataType worth of handle data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingShaderGroupHandleNV(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV &&
                       "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
# endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin, noexcept C-style wrapper around vkGetAccelerationStructureHandleNV.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                                                                          size_t                                        dataSize,
                                                                                          void *                                        pData,
                                                                                          Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
  {
    // Enhanced overload: handle bytes returned as a std::vector<DataType>;
    // 'dataSize' must be a multiple of sizeof( DataType ) (asserted below).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
# endif

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }

  // Convenience overload fetching exactly one DataType worth of handle data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
# endif

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records queries of acceleration-structure properties (e.g. compacted size) into 'queryPool',
  // one query per structure starting at 'firstQuery'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t                                               accelerationStructureCount,
                                                                                 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
                                                                                 VULKAN_HPP_NAMESPACE::QueryType                        queryType,
                                                                                 VULKAN_HPP_NAMESPACE::QueryPool                        queryPool,
                                                                                 uint32_t                                               firstQuery,
                                                                                 Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructureCount,
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
// Enhanced overload: count/pointer pair replaced by an ArrayProxy of acceleration structures.
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                                                               queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                                                               queryPool,
    uint32_t                                                                                      firstQuery,
    Dispatch const &                                                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV &&
                       "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" );
# endif

    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructures.size(),
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: returns the raw Result from vkCompileDeferredNV.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                           uint32_t                       shader,
                                                                           Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  }
#else
  // Enhanced build: same call, but the Result is funneled through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_maintenance3 ===

  // Thin, noexcept C-style wrapper around vkGetDescriptorSetLayoutSupportKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                                                   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                                                   Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupportKHR(
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 17999 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18000 { 18001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18003 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 18004 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 18005 # endif 18006 18007 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 18008 d.vkGetDescriptorSetLayoutSupportKHR( 18009 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 18010 18011 return support; 18012 } 18013 18014 template <typename X, typename Y, typename... Z, typename Dispatch> 18015 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const18016 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 18017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18018 { 18019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18020 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18021 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && 18022 "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); 18023 # endif 18024 18025 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18026 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 18027 d.vkGetDescriptorSetLayoutSupportKHR( 18028 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 18029 18030 return 
structureChain; 18031 } 18032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18033 18034 //=== VK_KHR_draw_indirect_count === 18035 18036 template <typename Dispatch> drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18037 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 18038 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18039 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18040 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18041 uint32_t maxDrawCount, 18042 uint32_t stride, 18043 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18044 { 18045 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18046 d.vkCmdDrawIndirectCountKHR( m_commandBuffer, 18047 static_cast<VkBuffer>( buffer ), 18048 static_cast<VkDeviceSize>( offset ), 18049 static_cast<VkBuffer>( countBuffer ), 18050 static_cast<VkDeviceSize>( countBufferOffset ), 18051 maxDrawCount, 18052 stride ); 18053 } 18054 18055 template <typename Dispatch> drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18056 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 18057 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18058 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18059 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18060 uint32_t maxDrawCount, 18061 uint32_t stride, 18062 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18063 { 18064 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18065 d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, 18066 static_cast<VkBuffer>( buffer ), 18067 
static_cast<VkDeviceSize>( offset ), 18068 static_cast<VkBuffer>( countBuffer ), 18069 static_cast<VkDeviceSize>( countBufferOffset ), 18070 maxDrawCount, 18071 stride ); 18072 } 18073 18074 //=== VK_EXT_external_memory_host === 18075 18076 template <typename Dispatch> 18077 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,Dispatch const & d) const18078 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 18079 const void * pHostPointer, 18080 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, 18081 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18082 { 18083 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18084 return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, 18085 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 18086 pHostPointer, 18087 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); 18088 } 18089 18090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18091 template <typename Dispatch> 18092 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,Dispatch const & d) const18093 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 18094 const void * pHostPointer, 18095 Dispatch const & d ) const 18096 { 18097 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18098 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18099 VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function 
<vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" ); 18100 # endif 18101 18102 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; 18103 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18104 d.vkGetMemoryHostPointerPropertiesEXT( m_device, 18105 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 18106 pHostPointer, 18107 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) ); 18108 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); 18109 18110 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); 18111 } 18112 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18113 18114 //=== VK_AMD_buffer_marker === 18115 18116 template <typename Dispatch> writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const18117 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 18118 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 18119 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 18120 uint32_t marker, 18121 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18122 { 18123 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18124 d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, 18125 static_cast<VkPipelineStageFlagBits>( pipelineStage ), 18126 static_cast<VkBuffer>( dstBuffer ), 18127 static_cast<VkDeviceSize>( dstOffset ), 18128 marker ); 18129 } 18130 18131 //=== VK_EXT_calibrated_timestamps === 18132 18133 template <typename Dispatch> getCalibrateableTimeDomainsEXT(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const18134 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, 18135 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 18136 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18137 { 18138 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18139 return static_cast<Result>( 18140 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 18141 } 18142 18143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18144 template <typename TimeDomainKHRAllocator, typename Dispatch> 18145 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const & d) const18146 PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const 18147 { 18148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18149 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18150 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 18151 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18152 # endif 18153 18154 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 18155 uint32_t timeDomainCount; 18156 VULKAN_HPP_NAMESPACE::Result result; 18157 do 18158 { 18159 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 18160 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 18161 { 18162 timeDomains.resize( timeDomainCount ); 18163 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18164 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 18165 } 18166 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18167 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 18168 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 18169 if ( timeDomainCount < timeDomains.size() ) 18170 { 18171 timeDomains.resize( timeDomainCount ); 18172 } 18173 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 18174 } 18175 18176 template <typename TimeDomainKHRAllocator, 18177 typename Dispatch, 18178 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 18179 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const18180 PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 18181 { 18182 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18183 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18184 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && 18185 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18186 # endif 18187 18188 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 18189 uint32_t timeDomainCount; 18190 VULKAN_HPP_NAMESPACE::Result result; 18191 do 18192 { 18193 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 18194 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 18195 { 18196 timeDomains.resize( timeDomainCount ); 18197 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 18198 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 18199 } 18200 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18201 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 18202 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 18203 if ( timeDomainCount < timeDomains.size() ) 18204 { 18205 timeDomains.resize( timeDomainCount ); 18206 } 18207 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 18208 } 18209 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18210 18211 template <typename Dispatch> getCalibratedTimestampsEXT(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const18212 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, 18213 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 18214 uint64_t * pTimestamps, 18215 uint64_t * pMaxDeviation, 18216 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18217 { 18218 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18219 return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( 18220 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 18221 } 18222 18223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18224 template <typename Uint64_tAllocator, typename Dispatch> 18225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch 
const & d) const18226 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 18227 Dispatch const & d ) const 18228 { 18229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18230 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18231 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18232 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18233 # endif 18234 18235 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 18236 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 18237 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 18238 uint64_t & maxDeviation = data_.second; 18239 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 18240 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 18241 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 18242 18243 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18244 } 18245 18246 template <typename Uint64_tAllocator, 18247 typename Dispatch, 18248 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 18249 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const18250 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 18251 Uint64_tAllocator & uint64_tAllocator, 18252 Dispatch const & d ) const 18253 { 18254 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18255 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18256 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18257 "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18258 # endif 18259 18260 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 18261 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 18262 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 18263 uint64_t & maxDeviation = data_.second; 18264 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 18265 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 18266 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 18267 18268 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18269 } 18270 18271 template <typename Dispatch> 18272 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const18273 Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 18274 { 18275 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18276 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18277 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && 18278 "Function <vkGetCalibratedTimestampsEXT> requires 
<VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 18279 # endif 18280 18281 std::pair<uint64_t, uint64_t> data_; 18282 uint64_t & timestamp = data_.first; 18283 uint64_t & maxDeviation = data_.second; 18284 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18285 d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 18286 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); 18287 18288 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 18289 } 18290 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18291 18292 //=== VK_NV_mesh_shader === 18293 18294 template <typename Dispatch> drawMeshTasksNV(uint32_t taskCount,uint32_t firstTask,Dispatch const & d) const18295 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18296 { 18297 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18298 d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); 18299 } 18300 18301 template <typename Dispatch> drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const18302 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 18303 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18304 uint32_t drawCount, 18305 uint32_t stride, 18306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18307 { 18308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18309 d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 18310 } 18311 18312 template <typename Dispatch> drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer 
buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const18313 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 18314 VULKAN_HPP_NAMESPACE::DeviceSize offset, 18315 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 18316 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 18317 uint32_t maxDrawCount, 18318 uint32_t stride, 18319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18320 { 18321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18322 d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, 18323 static_cast<VkBuffer>( buffer ), 18324 static_cast<VkDeviceSize>( offset ), 18325 static_cast<VkBuffer>( countBuffer ), 18326 static_cast<VkDeviceSize>( countBufferOffset ), 18327 maxDrawCount, 18328 stride ); 18329 } 18330 18331 //=== VK_NV_scissor_exclusive === 18332 18333 template <typename Dispatch> setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,Dispatch const & d) const18334 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 18335 uint32_t exclusiveScissorCount, 18336 const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, 18337 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18338 { 18339 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18340 d.vkCmdSetExclusiveScissorEnableNV( 18341 m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); 18342 } 18343 18344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18345 template <typename Dispatch> 18346 VULKAN_HPP_INLINE void setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & 
exclusiveScissorEnables,Dispatch const & d) const18347 CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 18348 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables, 18349 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18350 { 18351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18352 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18353 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" ); 18354 # endif 18355 18356 d.vkCmdSetExclusiveScissorEnableNV( 18357 m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) ); 18358 } 18359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18360 18361 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,Dispatch const & d) const18362 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 18363 uint32_t exclusiveScissorCount, 18364 const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, 18365 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18366 { 18367 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18368 d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); 18369 } 18370 18371 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18372 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,Dispatch const & d) const18373 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 18374 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & 
exclusiveScissors, 18375 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18376 { 18377 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18378 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18379 VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" ); 18380 # endif 18381 18382 d.vkCmdSetExclusiveScissorNV( 18383 m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) ); 18384 } 18385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18386 18387 //=== VK_NV_device_diagnostic_checkpoints === 18388 18389 template <typename Dispatch> setCheckpointNV(const void * pCheckpointMarker,Dispatch const & d) const18390 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18391 { 18392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18393 d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); 18394 } 18395 18396 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18397 template <typename CheckpointMarkerType, typename Dispatch> setCheckpointNV(CheckpointMarkerType const & checkpointMarker,Dispatch const & d) const18398 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18399 { 18400 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18401 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18402 VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18403 # endif 18404 18405 d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) ); 18406 } 18407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18408 18409 template <typename Dispatch> getCheckpointDataNV(uint32_t * 
pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,Dispatch const & d) const18410 VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, 18411 VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, 18412 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18413 { 18414 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18415 d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); 18416 } 18417 18418 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18419 template <typename CheckpointDataNVAllocator, typename Dispatch> 18420 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(Dispatch const & d) const18421 Queue::getCheckpointDataNV( Dispatch const & d ) const 18422 { 18423 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18424 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18425 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18426 # endif 18427 18428 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData; 18429 uint32_t checkpointDataCount; 18430 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 18431 checkpointData.resize( checkpointDataCount ); 18432 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 18433 18434 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 18435 if ( checkpointDataCount < checkpointData.size() ) 18436 { 18437 checkpointData.resize( checkpointDataCount ); 18438 } 18439 return checkpointData; 18440 } 18441 18442 template <typename CheckpointDataNVAllocator, 18443 typename Dispatch, 18444 typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, 
VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type> 18445 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(CheckpointDataNVAllocator & checkpointDataNVAllocator,Dispatch const & d) const18446 Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const 18447 { 18448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18449 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18450 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); 18451 # endif 18452 18453 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator ); 18454 uint32_t checkpointDataCount; 18455 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 18456 checkpointData.resize( checkpointDataCount ); 18457 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 18458 18459 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 18460 if ( checkpointDataCount < checkpointData.size() ) 18461 { 18462 checkpointData.resize( checkpointDataCount ); 18463 } 18464 return checkpointData; 18465 } 18466 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18467 18468 //=== VK_KHR_timeline_semaphore === 18469 18470 template <typename Dispatch> getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const18471 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 18472 uint64_t * pValue, 18473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18474 { 18475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18476 return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, 
static_cast<VkSemaphore>( semaphore ), pValue ) ); 18477 } 18478 18479 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18480 template <typename Dispatch> 18481 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const18482 Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const 18483 { 18484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18485 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18486 VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 18487 # endif 18488 18489 uint64_t value; 18490 VULKAN_HPP_NAMESPACE::Result result = 18491 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 18492 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); 18493 18494 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 18495 } 18496 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18497 18498 template <typename Dispatch> waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const18499 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 18500 uint64_t timeout, 18501 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18502 { 18503 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18504 return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 18505 } 18506 18507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18508 template <typename Dispatch> 18509 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const18510 Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 18511 { 18512 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18514 VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 18515 # endif 18516 18517 VULKAN_HPP_NAMESPACE::Result result = 18518 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 18519 VULKAN_HPP_NAMESPACE::detail::resultCheck( 18520 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 18521 18522 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 18523 } 18524 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18525 18526 template <typename Dispatch> signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const18527 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 18528 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18529 { 18530 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18531 return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 18532 } 18533 18534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18535 template <typename Dispatch> 18536 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const18537 
Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 18538 { 18539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18540 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18541 VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); 18542 # endif 18543 18544 VULKAN_HPP_NAMESPACE::Result result = 18545 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 18546 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); 18547 18548 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18549 } 18550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18551 18552 //=== VK_INTEL_performance_query === 18553 18554 template <typename Dispatch> initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,Dispatch const & d) const18555 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( 18556 const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18557 { 18558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18559 return static_cast<Result>( 18560 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); 18561 } 18562 18563 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18564 template <typename Dispatch> 18565 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo,Dispatch const & d) const18566 Device::initializePerformanceApiINTEL( const 
VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const 18567 { 18568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18570 VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" ); 18571 # endif 18572 18573 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18574 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) ); 18575 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); 18576 18577 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18578 } 18579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18580 18581 template <typename Dispatch> uninitializePerformanceApiINTEL(Dispatch const & d) const18582 VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18583 { 18584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18585 d.vkUninitializePerformanceApiINTEL( m_device ); 18586 } 18587 18588 template <typename Dispatch> setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const18589 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, 18590 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18591 { 18592 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18593 return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); 18594 } 18595 18596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18597 template <typename Dispatch> 18598 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,Dispatch const & d) const18599 CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 18600 { 18601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18602 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18603 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" ); 18604 # endif 18605 18606 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18607 d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) ); 18608 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); 18609 18610 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18611 } 18612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18613 18614 template <typename Dispatch> setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const18615 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( 18616 const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18617 { 18618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18619 return static_cast<Result>( 18620 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); 18621 } 18622 18623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18624 template <typename Dispatch> 18625 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType<void>::type setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo,Dispatch const & d) const18626 CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 18627 { 18628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18629 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18630 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" ); 18631 # endif 18632 18633 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18634 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) ); 18635 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); 18636 18637 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18638 } 18639 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18640 18641 template <typename Dispatch> setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,Dispatch const & d) const18642 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( 18643 const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18644 { 18645 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18646 return static_cast<Result>( 18647 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); 18648 } 18649 18650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18651 template <typename Dispatch> 18652 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,Dispatch const & d) const18653 CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const 18654 { 18655 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18656 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18657 VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" ); 18658 # endif 18659 18660 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18661 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) ); 18662 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); 18663 18664 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18665 } 18666 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18667 18668 template <typename Dispatch> 18669 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,Dispatch const & d) const18670 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, 18671 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, 18672 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18673 { 18674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18675 return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, 18676 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), 18677 reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) ); 
18678 } 18679 18680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18681 template <typename Dispatch> 18682 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const18683 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const 18684 { 18685 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18686 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18687 VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 18688 # endif 18689 18690 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 18691 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18692 d.vkAcquirePerformanceConfigurationINTEL( m_device, 18693 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 18694 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 18695 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); 18696 18697 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( configuration ) ); 18698 } 18699 18700 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18701 template <typename Dispatch> 18702 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const18703 Device::acquirePerformanceConfigurationINTELUnique( const 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, 18704 Dispatch const & d ) const 18705 { 18706 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18707 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18708 VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 18709 # endif 18710 18711 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 18712 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18713 d.vkAcquirePerformanceConfigurationINTEL( m_device, 18714 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 18715 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 18716 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); 18717 18718 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 18719 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) ); 18720 } 18721 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18722 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18723 18724 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18725 template <typename Dispatch> releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18726 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 18727 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18728 { 18729 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18730 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18731 } 18732 #else 18733 template 
<typename Dispatch> 18734 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18735 Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 18736 { 18737 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18738 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18739 VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 18740 # endif 18741 18742 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18743 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18744 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); 18745 18746 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18747 } 18748 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18749 18750 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18751 template <typename Dispatch> release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18752 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 18753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18754 { 18755 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18756 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18757 } 18758 #else 18759 template <typename Dispatch> 18760 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18761 Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 18762 { 18763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18764 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18765 VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 18766 # endif 18767 18768 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18769 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18770 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); 18771 18772 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18773 } 18774 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18775 18776 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18777 template <typename Dispatch> setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18778 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 18779 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18780 { 18781 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18782 return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18783 } 18784 #else 18785 template <typename Dispatch> 18786 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const18787 Queue::setPerformanceConfigurationINTEL( 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 18788 { 18789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18791 VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL && 18792 "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); 18793 # endif 18794 18795 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18796 d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 18797 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); 18798 18799 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 18800 } 18801 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18802 18803 template <typename Dispatch> getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,Dispatch const & d) const18804 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, 18805 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, 18806 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18807 { 18808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18809 return static_cast<Result>( d.vkGetPerformanceParameterINTEL( 18810 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); 18811 } 18812 18813 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18814 template <typename Dispatch> 18815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,Dispatch const & d) const18816 
Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const 18817 { 18818 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18819 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18820 VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" ); 18821 # endif 18822 18823 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; 18824 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPerformanceParameterINTEL( 18825 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) ); 18826 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); 18827 18828 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); 18829 } 18830 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18831 18832 //=== VK_AMD_display_native_hdr === 18833 18834 template <typename Dispatch> setLocalDimmingAMD(VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,Dispatch const & d) const18835 VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, 18836 VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, 18837 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18838 { 18839 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18840 d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); 18841 } 18842 18843 #if defined( VK_USE_PLATFORM_FUCHSIA ) 18844 //=== VK_FUCHSIA_imagepipe_surface === 18845 18846 template <typename Dispatch> 18847 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const18848 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, 18849 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18850 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 18851 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18852 { 18853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18854 return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, 18855 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), 18856 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18857 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 18858 } 18859 18860 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18861 template <typename Dispatch> 18862 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18863 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 18864 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18865 Dispatch const & d ) const 18866 { 18867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18868 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18869 VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); 18870 # endif 18871 18872 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 18873 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 18874 m_instance, 18875 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo 
), 18876 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18877 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 18878 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); 18879 18880 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 18881 } 18882 18883 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18884 template <typename Dispatch> 18885 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18886 Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 18887 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18888 Dispatch const & d ) const 18889 { 18890 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18891 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18892 VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); 18893 # endif 18894 18895 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 18896 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 18897 m_instance, 18898 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), 18899 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18900 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 18901 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); 
18902 18903 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 18904 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 18905 } 18906 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18907 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18908 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 18909 18910 #if defined( VK_USE_PLATFORM_METAL_EXT ) 18911 //=== VK_EXT_metal_surface === 18912 18913 template <typename Dispatch> createMetalSurfaceEXT(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const18914 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, 18915 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18916 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 18917 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18918 { 18919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18920 return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, 18921 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), 18922 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18923 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 18924 } 18925 18926 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18927 template <typename Dispatch> 18928 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18929 Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 18930 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18931 Dispatch const & d ) const 
18932 { 18933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18934 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18935 VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); 18936 # endif 18937 18938 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 18939 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18940 d.vkCreateMetalSurfaceEXT( m_instance, 18941 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 18942 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18943 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 18944 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); 18945 18946 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 18947 } 18948 18949 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18950 template <typename Dispatch> 18951 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18952 Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 18953 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18954 Dispatch const & d ) const 18955 { 18956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18957 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18958 VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); 18959 # endif 18960 18961 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 18962 VULKAN_HPP_NAMESPACE::Result result = 
/* NOTE(review): chunk of the generated Vulkan-Hpp command definitions (vulkan_funcs.hpp, produced from the
   Khronos Vulkan XML API Registry). This dump is garbled — original file line numbers are fused into the
   token stream — and the opening of the first function (Instance::createMetalSurfaceEXTUnique) lies before
   this chunk. Do not hand-edit the logic: fix the generator and regenerate. The statement below finishes
   createMetalSurfaceEXTUnique: it calls vkCreateMetalSurfaceEXT, funnels the result through resultCheck,
   and wraps the created VkSurfaceKHR in a UniqueHandle whose ObjectDestroy deleter reuses the same
   allocator and dispatcher. */
static_cast<VULKAN_HPP_NAMESPACE::Result>( 18963 d.vkCreateMetalSurfaceEXT( m_instance, 18964 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 18965 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18966 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 18967 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); 18968 18969 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 18970 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 18971 } 18972 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18973 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18974 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 18975 18976 //=== VK_KHR_fragment_shading_rate === 18977 18978 template <typename Dispatch> 18979 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getFragmentShadingRatesKHR(uint32_t * pFragmentShadingRateCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,Dispatch const & d) const18980 PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, 18981 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, 18982 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18983 { 18984 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18985 return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 18986 m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); 18987 } 18988 18989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18990 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch> 18991 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 18992 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, 
PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(Dispatch const & d) const18993 PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const 18994 { 18995 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18996 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 18997 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && 18998 "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); 18999 # endif 19000 19001 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates; 19002 uint32_t fragmentShadingRateCount; 19003 VULKAN_HPP_NAMESPACE::Result result; 19004 do 19005 { 19006 result = 19007 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 19008 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 19009 { 19010 fragmentShadingRates.resize( fragmentShadingRateCount ); 19011 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 19012 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 19013 } 19014 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19015 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 19016 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 19017 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 19018 { 19019 fragmentShadingRates.resize( fragmentShadingRateCount ); 19020 } 19021 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); 19022 } 19023 19024 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, 19025 
/* Allocator-taking overload of getFragmentShadingRatesKHR. The enable_if template parameter (defaulted at
   the declaration site, outside this chunk) restricts the allocator's value_type to
   PhysicalDeviceFragmentShadingRateKHR. Same two-call enumeration pattern as the overload above: query the
   count, resize, fetch, and retry while the implementation reports eIncomplete. */
typename Dispatch, 19026 typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type, 19027 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, 19028 int>::type> 19029 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19030 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,Dispatch const & d) const19031 PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, 19032 Dispatch const & d ) const 19033 { 19034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19035 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19036 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && 19037 "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); 19038 # endif 19039 19040 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( 19041 physicalDeviceFragmentShadingRateKHRAllocator ); 19042 uint32_t fragmentShadingRateCount; 19043 VULKAN_HPP_NAMESPACE::Result result; 19044 do 19045 { 19046 result = 19047 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 19048 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 19049 { 19050 fragmentShadingRates.resize( fragmentShadingRateCount ); 19051 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 19052 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 19053 } 19054 } while ( result 
== VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19055 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 19056 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 19057 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 19058 { 19059 fragmentShadingRates.resize( fragmentShadingRateCount ); 19060 } 19061 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); 19062 } 19063 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19064 19065 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const19066 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, 19067 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 19068 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19069 { 19070 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19071 d.vkCmdSetFragmentShadingRateKHR( 19072 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 19073 } 19074 19075 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19076 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const19077 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, 19078 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 19079 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19080 { 19081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19082 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19083 VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" ); 19084 # endif 19085 19086 d.vkCmdSetFragmentShadingRateKHR( 19087 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 19088 } 19089 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19090 19091 //=== VK_KHR_dynamic_rendering_local_read === 19092 19093 template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR * pLocationInfo,Dispatch const & d) const19094 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR * pLocationInfo, 19095 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19096 { 19097 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19098 d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( pLocationInfo ) ); 19099 } 19100 19101 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19102 template <typename Dispatch> setRenderingAttachmentLocationsKHR(const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo,Dispatch const & d) const19103 VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo, 19104 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19105 { 19106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19107 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19108 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && 19109 "Function <vkCmdSetRenderingAttachmentLocationsKHR> requires <VK_KHR_dynamic_rendering_local_read>" ); 19110 # endif 19111 19112 d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, 
reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( &locationInfo ) ); 19113 } 19114 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19115 19116 template <typename Dispatch> setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pLocationInfo,Dispatch const & d) const19117 VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pLocationInfo, 19118 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19119 { 19120 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19121 d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfoKHR *>( pLocationInfo ) ); 19122 } 19123 19124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19125 template <typename Dispatch> setRenderingInputAttachmentIndicesKHR(const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo,Dispatch const & d) const19126 VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo, 19127 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19128 { 19129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19131 VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && 19132 "Function <vkCmdSetRenderingInputAttachmentIndicesKHR> requires <VK_KHR_dynamic_rendering_local_read>" ); 19133 # endif 19134 19135 d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfoKHR *>( &locationInfo ) ); 19136 } 19137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19138 19139 //=== VK_EXT_buffer_device_address === 19140 19141 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const19142 
VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 19143 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19144 { 19145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19146 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 19147 } 19148 19149 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19150 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const19151 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 19152 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19153 { 19154 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19155 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19156 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && 19157 "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 19158 # endif 19159 19160 VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 19161 19162 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 19163 } 19164 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19165 19166 //=== VK_EXT_tooling_info === 19167 19168 template <typename Dispatch> getToolPropertiesEXT(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const19169 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, 19170 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 19171 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19172 { 19173 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19174 return 
static_cast<Result>( 19175 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 19176 } 19177 19178 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19179 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 19180 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19181 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(Dispatch const & d) const19182 PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const 19183 { 19184 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19185 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19186 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 19187 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 19188 # endif 19189 19190 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 19191 uint32_t toolCount; 19192 VULKAN_HPP_NAMESPACE::Result result; 19193 do 19194 { 19195 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 19196 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 19197 { 19198 toolProperties.resize( toolCount ); 19199 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19200 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 19201 } 19202 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19203 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 19204 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 19205 if ( toolCount < toolProperties.size() ) 19206 { 19207 
toolProperties.resize( toolCount ); 19208 } 19209 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 19210 } 19211 19212 template < 19213 typename PhysicalDeviceToolPropertiesAllocator, 19214 typename Dispatch, 19215 typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, 19216 int>::type> 19217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19218 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const19219 PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 19220 { 19221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19223 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && 19224 "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); 19225 # endif 19226 19227 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 19228 physicalDeviceToolPropertiesAllocator ); 19229 uint32_t toolCount; 19230 VULKAN_HPP_NAMESPACE::Result result; 19231 do 19232 { 19233 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 19234 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 19235 { 19236 toolProperties.resize( toolCount ); 19237 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19238 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 19239 } 19240 } while ( 
result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19241 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 19242 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 19243 if ( toolCount < toolProperties.size() ) 19244 { 19245 toolProperties.resize( toolCount ); 19246 } 19247 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); 19248 } 19249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19250 19251 //=== VK_KHR_present_wait === 19252 19253 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19254 template <typename Dispatch> waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const19255 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 19256 uint64_t presentId, 19257 uint64_t timeout, 19258 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19259 { 19260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19261 return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 19262 } 19263 #else 19264 template <typename Dispatch> 19265 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const19266 Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const 19267 { 19268 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19270 VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" ); 19271 # endif 19272 19273 VULKAN_HPP_NAMESPACE::Result result = 19274 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresentKHR( 
/* NOTE(review): per the resultCheck call just below, eTimeout and eSuboptimalKHR are accepted as
   non-throwing success codes for waitForPresentKHR, so callers must inspect the returned Result rather
   than rely on an exception. */
m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 19275 VULKAN_HPP_NAMESPACE::detail::resultCheck( 19276 result, 19277 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", 19278 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 19279 19280 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 19281 } 19282 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19283 19284 //=== VK_NV_cooperative_matrix === 19285 19286 template <typename Dispatch> getCooperativeMatrixPropertiesNV(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,Dispatch const & d) const19287 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( 19288 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19289 { 19290 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19291 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19292 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); 19293 } 19294 19295 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19296 template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch> 19297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19298 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const & d) const19299 PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const 19300 { 19301 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19302 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19303 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 19304 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> 
requires <VK_NV_cooperative_matrix>" ); 19305 # endif 19306 19307 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties; 19308 uint32_t propertyCount; 19309 VULKAN_HPP_NAMESPACE::Result result; 19310 do 19311 { 19312 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 19313 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 19314 { 19315 properties.resize( propertyCount ); 19316 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19317 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 19318 } 19319 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19320 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 19321 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 19322 if ( propertyCount < properties.size() ) 19323 { 19324 properties.resize( propertyCount ); 19325 } 19326 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 19327 } 19328 19329 template <typename CooperativeMatrixPropertiesNVAllocator, 19330 typename Dispatch, 19331 typename std::enable_if< 19332 std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, 19333 int>::type> 19334 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19335 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,Dispatch const & d) const19336 PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & 
cooperativeMatrixPropertiesNVAllocator, 19337 Dispatch const & d ) const 19338 { 19339 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19340 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19341 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && 19342 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); 19343 # endif 19344 19345 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( 19346 cooperativeMatrixPropertiesNVAllocator ); 19347 uint32_t propertyCount; 19348 VULKAN_HPP_NAMESPACE::Result result; 19349 do 19350 { 19351 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 19352 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 19353 { 19354 properties.resize( propertyCount ); 19355 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 19356 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 19357 } 19358 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19359 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 19360 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 19361 if ( propertyCount < properties.size() ) 19362 { 19363 properties.resize( propertyCount ); 19364 } 19365 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 19366 } 19367 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19368 19369 //=== VK_NV_coverage_reduction_mode === 19370 19371 template <typename Dispatch> getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t * pCombinationCount,VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,Dispatch 
const & d) const19372 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 19373 uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19374 { 19375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19376 return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19377 m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); 19378 } 19379 19380 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19381 template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch> 19382 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19383 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const & d) const19384 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const 19385 { 19386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19387 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19388 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 19389 "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 19390 # endif 19391 19392 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations; 19393 uint32_t combinationCount; 19394 VULKAN_HPP_NAMESPACE::Result result; 19395 do 19396 { 19397 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19398 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 19399 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
combinationCount ) 19400 { 19401 combinations.resize( combinationCount ); 19402 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19403 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 19404 } 19405 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19406 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 19407 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 19408 if ( combinationCount < combinations.size() ) 19409 { 19410 combinations.resize( combinationCount ); 19411 } 19412 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 19413 } 19414 19415 template <typename FramebufferMixedSamplesCombinationNVAllocator, 19416 typename Dispatch, 19417 typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, 19418 VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, 19419 int>::type> 19420 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19421 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,Dispatch const & d) const19422 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 19423 FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const 19424 { 19425 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19426 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19427 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && 19428 "Function 
<vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); 19429 # endif 19430 19431 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( 19432 framebufferMixedSamplesCombinationNVAllocator ); 19433 uint32_t combinationCount; 19434 VULKAN_HPP_NAMESPACE::Result result; 19435 do 19436 { 19437 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19438 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 19439 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 19440 { 19441 combinations.resize( combinationCount ); 19442 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 19443 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 19444 } 19445 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19446 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 19447 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 19448 if ( combinationCount < combinations.size() ) 19449 { 19450 combinations.resize( combinationCount ); 19451 } 19452 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); 19453 } 19454 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19455 19456 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 19457 //=== VK_EXT_full_screen_exclusive === 19458 19459 template <typename Dispatch> 19460 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const19461 
/* Win32-only section (guarded by VK_USE_PLATFORM_WIN32_KHR above): VK_EXT_full_screen_exclusive surface
   present-mode queries, using the same count/fetch/eIncomplete enumeration loop as the other array-returning
   commands in this file. */
PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 19462 uint32_t * pPresentModeCount, 19463 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 19464 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19465 { 19466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19467 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 19468 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 19469 pPresentModeCount, 19470 reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 19471 } 19472 19473 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19474 template <typename PresentModeKHRAllocator, typename Dispatch> 19475 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const19476 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 19477 { 19478 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19479 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19480 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && 19481 "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 19482 # endif 19483 19484 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 19485 uint32_t presentModeCount; 19486 VULKAN_HPP_NAMESPACE::Result result; 19487 do 19488 { 19489 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 19490 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 19491 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
presentModeCount ) 19492 { 19493 presentModes.resize( presentModeCount ); 19494 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19495 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 19496 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 19497 &presentModeCount, 19498 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 19499 } 19500 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19501 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 19502 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 19503 if ( presentModeCount < presentModes.size() ) 19504 { 19505 presentModes.resize( presentModeCount ); 19506 } 19507 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 19508 } 19509 19510 template <typename PresentModeKHRAllocator, 19511 typename Dispatch, 19512 typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 19513 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const19514 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 19515 PresentModeKHRAllocator & presentModeKHRAllocator, 19516 Dispatch const & d ) const 19517 { 19518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19519 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19520 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && 19521 "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 19522 # endif 19523 19524 
std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 19525 uint32_t presentModeCount; 19526 VULKAN_HPP_NAMESPACE::Result result; 19527 do 19528 { 19529 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 19530 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 19531 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 19532 { 19533 presentModes.resize( presentModeCount ); 19534 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19535 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 19536 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 19537 &presentModeCount, 19538 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 19539 } 19540 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19541 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 19542 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 19543 if ( presentModeCount < presentModes.size() ) 19544 { 19545 presentModes.resize( presentModeCount ); 19546 } 19547 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); 19548 } 19549 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19550 19551 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19552 template <typename Dispatch> acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const19553 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 19554 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19555 { 19556 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19557 return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, 
/* acquire/releaseFullScreenExclusiveModeEXT are symmetric wrappers: both forward the swapchain handle to
   the driver entry point and, in enhanced mode, route the Result through resultCheck (which throws on
   failure when exceptions are enabled). */
static_cast<VkSwapchainKHR>( swapchain ) ) ); 19558 } 19559 # else 19560 template <typename Dispatch> 19561 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const19562 Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 19563 { 19564 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19565 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19566 VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); 19567 # endif 19568 19569 VULKAN_HPP_NAMESPACE::Result result = 19570 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 19571 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); 19572 19573 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19574 } 19575 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19576 19577 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19578 template <typename Dispatch> releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const19579 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 19580 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19581 { 19582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19583 return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 19584 } 19585 # else 19586 template <typename Dispatch> 19587 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const19588 Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 19589 { 19590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19591 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19592 VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); 19593 # endif 19594 19595 VULKAN_HPP_NAMESPACE::Result result = 19596 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 19597 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); 19598 19599 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 19600 } 19601 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19602 19603 template <typename Dispatch> 19604 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const19605 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 19606 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 19607 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19608 { 19609 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19610 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( 19611 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 19612 } 19613 19614 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19615 template <typename Dispatch> 19616 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const19617 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 19618 { 19619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19620 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19621 VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT && 19622 "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); 19623 # endif 19624 19625 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 19626 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( 19627 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 19628 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); 19629 19630 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); 19631 } 19632 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19633 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 19634 19635 //=== VK_EXT_headless_surface === 19636 19637 template <typename Dispatch> createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const19638 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, 19639 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 19640 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 19641 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 19642 { 19643 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19644 return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, 19645 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), 19646 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 19647 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 19648 } 19649 19650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19651 template <typename Dispatch> 19652 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19653 Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 19654 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19655 Dispatch const & d ) const 19656 { 19657 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19659 VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); 19660 # endif 19661 19662 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19663 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 19664 m_instance, 19665 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 19666 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19667 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19668 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); 19669 19670 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 19671 } 19672 19673 # ifndef 
VULKAN_HPP_NO_SMART_HANDLE 19674 template <typename Dispatch> 19675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19676 Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 19677 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19678 Dispatch const & d ) const 19679 { 19680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19681 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19682 VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); 19683 # endif 19684 19685 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 19686 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 19687 m_instance, 19688 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 19689 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19690 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 19691 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); 19692 19693 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 19694 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 19695 } 19696 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 19697 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19698 19699 //=== VK_KHR_buffer_device_address === 19700 19701 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch 
const & d) const19702 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 19703 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19704 { 19705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19706 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 19707 } 19708 19709 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19710 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const19711 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 19712 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19713 { 19714 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19715 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19716 VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR && 19717 "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 19718 # endif 19719 19720 VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 19721 19722 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 19723 } 19724 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19725 19726 template <typename Dispatch> getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const19727 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 19728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19729 { 19730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19731 return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo 
*>( pInfo ) ); 19732 } 19733 19734 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19735 template <typename Dispatch> getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const19736 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 19737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19738 { 19739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19740 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19741 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR && 19742 "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 19743 # endif 19744 19745 uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 19746 19747 return result; 19748 } 19749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19750 19751 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) const19752 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 19753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19754 { 19755 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19756 return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 19757 } 19758 19759 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19760 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const19761 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 19762 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 19763 { 19764 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19765 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19766 VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR && 19767 "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); 19768 # endif 19769 19770 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 19771 19772 return result; 19773 } 19774 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19775 19776 //=== VK_EXT_line_rasterization === 19777 19778 template <typename Dispatch> 19779 VULKAN_HPP_INLINE void setLineStippleEXT(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const19780 CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19781 { 19782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19783 d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); 19784 } 19785 19786 //=== VK_EXT_host_query_reset === 19787 19788 template <typename Dispatch> resetQueryPoolEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const19789 VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 19790 uint32_t firstQuery, 19791 uint32_t queryCount, 19792 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19793 { 19794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19795 d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 19796 } 19797 19798 //=== VK_EXT_extended_dynamic_state === 19799 19800 template <typename Dispatch> setCullModeEXT(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const19801 VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( 
VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19802 { 19803 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19804 d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); 19805 } 19806 19807 template <typename Dispatch> setFrontFaceEXT(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const19808 VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19809 { 19810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19811 d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); 19812 } 19813 19814 template <typename Dispatch> setPrimitiveTopologyEXT(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const19815 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 19816 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19817 { 19818 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19819 d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 19820 } 19821 19822 template <typename Dispatch> setViewportWithCountEXT(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const19823 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, 19824 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 19825 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19826 { 19827 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19828 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 19829 } 19830 19831 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19832 template <typename Dispatch> setViewportWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const19833 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 19834 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19835 { 19836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19837 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19838 VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT && 19839 "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 19840 # endif 19841 19842 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 19843 } 19844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19845 19846 template <typename Dispatch> 19847 VULKAN_HPP_INLINE void setScissorWithCountEXT(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const19848 CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19849 { 19850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19851 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 19852 } 19853 19854 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19855 template <typename Dispatch> setScissorWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const19856 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 19857 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19858 { 19859 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19860 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19861 VULKAN_HPP_ASSERT( 
d.vkCmdSetScissorWithCountEXT && 19862 "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 19863 # endif 19864 19865 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 19866 } 19867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19868 19869 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const19870 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 19871 uint32_t bindingCount, 19872 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 19873 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 19874 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 19875 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 19876 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19877 { 19878 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19879 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, 19880 firstBinding, 19881 bindingCount, 19882 reinterpret_cast<const VkBuffer *>( pBuffers ), 19883 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 19884 reinterpret_cast<const VkDeviceSize *>( pSizes ), 19885 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 19886 } 19887 19888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19889 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch 
const & d) const19890 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 19891 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 19892 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 19893 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 19894 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 19895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 19896 { 19897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19898 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 19899 VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT && 19900 "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); 19901 # endif 19902 # ifdef VULKAN_HPP_NO_EXCEPTIONS 19903 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 19904 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 19905 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 19906 # else 19907 if ( buffers.size() != offsets.size() ) 19908 { 19909 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); 19910 } 19911 if ( !sizes.empty() && buffers.size() != sizes.size() ) 19912 { 19913 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); 19914 } 19915 if ( !strides.empty() && buffers.size() != strides.size() ) 19916 { 19917 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); 19918 } 19919 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 19920 19921 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, 19922 firstBinding, 19923 buffers.size(), 19924 reinterpret_cast<const VkBuffer *>( buffers.data() ), 19925 
reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 19926 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 19927 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 19928 } 19929 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19930 19931 template <typename Dispatch> setDepthTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const19932 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19933 { 19934 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19935 d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); 19936 } 19937 19938 template <typename Dispatch> setDepthWriteEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const19939 VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19940 { 19941 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19942 d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); 19943 } 19944 19945 template <typename Dispatch> setDepthCompareOpEXT(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const19946 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19947 { 19948 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19949 d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); 19950 } 19951 19952 template <typename Dispatch> setDepthBoundsTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const19953 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 19954 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 19955 { 19956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19957 d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); 19958 } 19959 19960 template <typename Dispatch> setStencilTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const19961 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19962 { 19963 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19964 d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); 19965 } 19966 19967 template <typename Dispatch> setStencilOpEXT(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const19968 VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 19969 VULKAN_HPP_NAMESPACE::StencilOp failOp, 19970 VULKAN_HPP_NAMESPACE::StencilOp passOp, 19971 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 19972 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 19973 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19974 { 19975 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19976 d.vkCmdSetStencilOpEXT( m_commandBuffer, 19977 static_cast<VkStencilFaceFlags>( faceMask ), 19978 static_cast<VkStencilOp>( failOp ), 19979 static_cast<VkStencilOp>( passOp ), 19980 static_cast<VkStencilOp>( depthFailOp ), 19981 static_cast<VkCompareOp>( compareOp ) ); 19982 } 19983 19984 //=== VK_KHR_deferred_host_operations === 19985 19986 template <typename Dispatch> createDeferredOperationKHR(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,Dispatch const & d) const19987 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 19988 VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, 19989 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19990 { 19991 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19992 return static_cast<Result>( d.vkCreateDeferredOperationKHR( 19993 m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); 19994 } 19995 19996 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19997 template <typename Dispatch> 19998 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19999 Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 20000 { 20001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20003 VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); 20004 # endif 20005 20006 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; 20007 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR( 20008 m_device, 20009 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20010 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); 20011 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); 20012 20013 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); 20014 
}

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant of createDeferredOperationKHR: the returned DeferredOperationKHR is
  // wrapped in a UniqueHandle whose deleter (ObjectDestroy) destroys it with the same allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
    Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    VULKAN_HPP_NAMESPACE::Result               result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR(
      m_device,
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );

    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Pointer-parameter variant: forwards directly to vkDestroyDeferredOperationKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                              Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant taking Optional<AllocationCallbacks> instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                operation,
                                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                              Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // destroy() overload resolving on DeferredOperationKHR; same effect as destroyDeferredOperationKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy() overload with Optional allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                operation,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
#  endif

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Returns the maximum number of threads that can usefully join the deferred operation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                            Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  }

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20105 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20107 { 20108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20109 return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20110 } 20111 #else 20112 template <typename Dispatch> 20113 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20114 Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20115 { 20116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20118 VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" ); 20119 # endif 20120 20121 VULKAN_HPP_NAMESPACE::Result result = 20122 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20123 20124 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 20125 } 20126 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20127 20128 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20129 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20130 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20131 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20132 { 20133 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
20134 return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20135 } 20136 #else 20137 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const20138 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 20139 Dispatch const & d ) const 20140 { 20141 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20142 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20143 VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" ); 20144 # endif 20145 20146 VULKAN_HPP_NAMESPACE::Result result = 20147 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 20148 VULKAN_HPP_NAMESPACE::detail::resultCheck( 20149 result, 20150 VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", 20151 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); 20152 20153 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 20154 } 20155 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20156 20157 //=== VK_KHR_pipeline_executable_properties === 20158 20159 template <typename Dispatch> getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,Dispatch const & d) const20160 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, 20161 uint32_t * pExecutableCount, 20162 VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, 20163 Dispatch const & d ) const 
    VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                                        reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
                                                                        pExecutableCount,
                                                                        reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: runs the count/fetch enumeration loop until the count is stable and
  // returns the properties in a std::vector.
  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR &&
                       "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
    uint32_t                                                                                                     executableCount;
    VULKAN_HPP_NAMESPACE::Result                                                                                 result;
    do
    {
      // First call queries the element count, second call fills the vector; repeat while the
      // implementation reports eIncomplete (count changed between the two calls).
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                  &executableCount,
                                                  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    if ( executableCount < properties.size() )
    {
      // Shrink in case fewer elements were returned than were allocated.
      properties.resize( executableCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }

  // Enhanced variant with a caller-supplied allocator for the returned vector.
  template <typename PipelineExecutablePropertiesKHRAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
                                                PipelineExecutablePropertiesKHRAllocator &    pipelineExecutablePropertiesKHRAllocator,
                                                Dispatch const &                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR &&
                       "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
      pipelineExecutablePropertiesKHRAllocator );
    uint32_t                     executableCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                  &executableCount,
                                                  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    if ( executableCount < properties.size() )
    {
      properties.resize( executableCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Pointer-parameter variant for pipeline executable statistics.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                uint32_t *                                              pStatisticCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR *  pStatistics,
                                                Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                                        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                                                        pStatisticCount,
                                                                        reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: count/fetch enumeration loop, returns the statistics in a std::vector.
  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR &&
                       "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
    uint32_t                                                                                                   statisticCount;
    VULKAN_HPP_NAMESPACE::Result                                                                               result;
    do
    {
      // Query the count, then fetch; repeat while eIncomplete is reported.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                  &statisticCount,
                                                  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    if ( statisticCount < statistics.size() )
    {
      statistics.resize( statisticCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) );
  }

  // Enhanced variant with a caller-supplied vector allocator.
  template <typename PipelineExecutableStatisticKHRAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
                                                PipelineExecutableStatisticKHRAllocator &               pipelineExecutableStatisticKHRAllocator,
                                                Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR &&
                       "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
      pipelineExecutableStatisticKHRAllocator );
    uint32_t                     statisticCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                  &statisticCount,
                                                  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    if ( statisticCount < statistics.size() )
    {
      statistics.resize( statisticCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Pointer-parameter variant for pipeline executable internal representations.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                             uint32_t *                                              pInternalRepresentationCount,
                                                             VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
                                                           reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                                           pInternalRepresentationCount,
                                                           reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: count/fetch enumeration loop, returns the internal representations in a std::vector.
  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR &&
                       "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations;
    uint32_t                     internalRepresentationCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // Query the count, then fetch; repeat while eIncomplete is reported.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    if ( internalRepresentationCount < internalRepresentations.size() )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) );
  }

  // Enhanced variant with a caller-supplied vector allocator.
  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type,
                                                 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR(
      const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
      PipelineExecutableInternalRepresentationKHRAllocator &  pipelineExecutableInternalRepresentationKHRAllocator,
      Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR &&
                       "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
    uint32_t                     internalRepresentationCount;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    if ( internalRepresentationCount < internalRepresentations.size() )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_host_image_copy ===

  // Pointer-parameter variant: host copy from memory to an image.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: takes the info struct by reference; result is checked (throws on failure).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy>" );
#  endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( ©MemoryToImageInfo ) ) ); 20474 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); 20475 20476 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20477 } 20478 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20479 20480 template <typename Dispatch> copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,Dispatch const & d) const20481 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, 20482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20483 { 20484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20485 return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) ); 20486 } 20487 20488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20489 template <typename Dispatch> 20490 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo,Dispatch const & d) const20491 Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const 20492 { 20493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20494 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20495 VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy>" ); 20496 # endif 20497 20498 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20499 d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( ©ImageToMemoryInfo ) ) ); 20500 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); 20501 20502 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20503 } 20504 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20505 20506 template <typename Dispatch> copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,Dispatch const & d) const20507 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, 20508 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20509 { 20510 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20511 return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) ); 20512 } 20513 20514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20515 template <typename Dispatch> 20516 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo,Dispatch const & d) const20517 Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const 20518 { 20519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20520 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20521 VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy>" ); 20522 # endif 20523 20524 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20525 d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( ©ImageToImageInfo ) ) ); 20526 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); 20527 20528 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20529 } 20530 #endif /* 
// NOTE(review): Auto-generated Vulkan-Hpp command definitions (from the Khronos Vulkan
// XML API registry). This chunk's text appears garbled by an extraction tool — original
// line numbers and ctags-style signature stubs are fused into the code — so the code
// tokens below are preserved verbatim; only reviewer comments have been added.
// Pattern throughout this chunk: each command has (a) a raw-pointer overload that is a
// noexcept pass-through returning the C Result, and (b) an "enhanced" reference/ArrayProxy
// overload (inside #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE) that calls
// detail::resultCheck — which reports failure (throws where exceptions are enabled) —
// before packaging the value via detail::createResultValueType. Every wrapper first
// asserts the dispatcher's header version matches VK_HEADER_VERSION, and under the
// dynamic dispatch loader additionally asserts the function pointer was loaded.
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20531 20532 template <typename Dispatch> transitionImageLayoutEXT(uint32_t transitionCount,const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,Dispatch const & d) const20533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, 20534 const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, 20535 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20536 { 20537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20538 return static_cast<Result>( 20539 d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) ); 20540 } 20541 20542 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20543 template <typename Dispatch> 20544 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type transitionImageLayoutEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,Dispatch const & d) const20545 Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions, 20546 Dispatch const & d ) const 20547 { 20548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20550 VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy>" ); 20551 # endif 20552 20553 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20554 d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) ) ); 20555 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); 20556 20557 return
// (continuation) enhanced transitionImageLayoutEXT completes here: void-success result after resultCheck.
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20558 } 20559 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20560 20561 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const20562 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, 20563 const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, 20564 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 20565 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20566 { 20567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20568 d.vkGetImageSubresourceLayout2EXT( m_device, 20569 static_cast<VkImage>( image ), 20570 reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), 20571 reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 20572 } 20573 20574 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20575 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const20576 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT( 20577 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20578 { 20579 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20580 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20581 VULKAN_HPP_ASSERT( 20582 d.vkGetImageSubresourceLayout2EXT && 20583 "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" ); 20584 # endif 20585 20586 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 20587 d.vkGetImageSubresourceLayout2EXT( m_device, 20588
// (continuation) enhanced getImageSubresourceLayout2EXT: query is void — returns the filled SubresourceLayout2KHR by value; a StructureChain overload follows for chained extension outputs.
static_cast<VkImage>( image ), 20589 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 20590 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 20591 20592 return layout; 20593 } 20594 20595 template <typename X, typename Y, typename... Z, typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const20596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT( 20597 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20598 { 20599 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20600 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20601 VULKAN_HPP_ASSERT( 20602 d.vkGetImageSubresourceLayout2EXT && 20603 "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" ); 20604 # endif 20605 20606 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 20607 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 20608 d.vkGetImageSubresourceLayout2EXT( m_device, 20609 static_cast<VkImage>( image ), 20610 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 20611 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 20612 20613 return structureChain; 20614 } 20615 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20616 20617 //=== VK_KHR_map_memory2 === 20618 20619 template <typename Dispatch> mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,void ** ppData,Dispatch const & d) const20620 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo, 20621 void ** ppData, 20622 Dispatch const
// (continuation) VK_KHR_map_memory2: the enhanced mapMemory2KHR below returns the mapped host pointer (void *) on success.
& d ) const VULKAN_HPP_NOEXCEPT 20623 { 20624 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20625 return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) ); 20626 } 20627 20628 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20629 template <typename Dispatch> 20630 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo,Dispatch const & d) const20631 Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const 20632 { 20633 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20634 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20635 VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2>" ); 20636 # endif 20637 20638 void * pData; 20639 VULKAN_HPP_NAMESPACE::Result result = 20640 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData ) ); 20641 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); 20642 20643 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); 20644 } 20645 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20646 20647 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,Dispatch const & d) const20648 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, 20649 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20650 { 20651 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20652 return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) ); 20653 } 20654 20655 #ifndef
VULKAN_HPP_DISABLE_ENHANCED_MODE 20656 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,Dispatch const & d) const20657 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, 20658 Dispatch const & d ) const 20659 { 20660 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20661 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20662 VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" ); 20663 # endif 20664 20665 VULKAN_HPP_NAMESPACE::Result result = 20666 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) ) ); 20667 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); 20668 20669 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20670 } 20671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20672 20673 //=== VK_EXT_swapchain_maintenance1 === 20674 20675 template <typename Dispatch> releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,Dispatch const & d) const20676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, 20677 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20678 { 20679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20680 return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); 20681 } 20682 20683 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20684 template <typename Dispatch> 20685 VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT &
releaseInfo,Dispatch const & d) const20686 Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const 20687 { 20688 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20689 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20690 VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" ); 20691 # endif 20692 20693 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20694 d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) ); 20695 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); 20696 20697 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20698 } 20699 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20700 20701 //=== VK_NV_device_generated_commands === 20702 20703 template <typename Dispatch> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const20704 VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, 20705 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 20706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20707 { 20708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20709 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 20710 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), 20711 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 20712 } 20713 20714 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20715 template <typename Dispatch> 20716 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
// (continuation) VK_NV_device_generated_commands: memory-requirements query is void — enhanced overload returns MemoryRequirements2 by value; a StructureChain overload follows.
VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const20717 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 20718 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20719 { 20720 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20721 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20722 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 20723 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 20724 # endif 20725 20726 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 20727 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 20728 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 20729 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 20730 20731 return memoryRequirements; 20732 } 20733 20734 template <typename X, typename Y, typename...
// (continuation) StructureChain overload: extension structures X, Y, Z... are chained onto the MemoryRequirements2 element before the query fills them.
Z, typename Dispatch> 20735 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const20736 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 20737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20738 { 20739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20740 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20741 VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && 20742 "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); 20743 # endif 20744 20745 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 20746 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 20747 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 20748 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 20749 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 20750 20751 return structureChain; 20752 } 20753 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20754 20755 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const20756 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 20757 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20758 { 20759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20760 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 20761 } 20762 20763 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20764 template
// (continuation) CommandBuffer::preprocessGeneratedCommandsNV (enhanced): reference overload forwarding to vkCmdPreprocessGeneratedCommandsNV; void command, no result checking.
<typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const20765 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 20766 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20767 { 20768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20769 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20770 VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 20771 # endif 20772 20773 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 20774 } 20775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20776 20777 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const20778 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 20779 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 20780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20781 { 20782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20783 d.vkCmdExecuteGeneratedCommandsNV( 20784 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 20785 } 20786 20787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20788 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const20789 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 20790
// (continuation) executeGeneratedCommandsNV: isPreprocessed is forwarded as VkBool32 to the C command (per signature, it flags prior preprocessing — see the NV extension spec).
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 20791 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20792 { 20793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20795 VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); 20796 # endif 20797 20798 d.vkCmdExecuteGeneratedCommandsNV( 20799 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 20800 } 20801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20802 20803 template <typename Dispatch> bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t groupIndex,Dispatch const & d) const20804 VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 20805 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 20806 uint32_t groupIndex, 20807 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20808 { 20809 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20810 d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); 20811 } 20812 20813 template <typename Dispatch> 20814 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,Dispatch const & d) const20815 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, 20816 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20817
// (continuation) createIndirectCommandsLayoutNV (raw): writes the new handle through pIndirectCommandsLayout and returns the C Result unchanged.
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, 20818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20819 { 20820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20821 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, 20822 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), 20823 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 20824 reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); 20825 } 20826 20827 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20828 template <typename Dispatch> 20829 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20830 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 20831 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20832 Dispatch const & d ) const 20833 { 20834 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20835 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20836 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 20837 # endif 20838 20839 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 20840 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 20841 m_device, 20842 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 20843 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20844 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout
// (continuation) enhanced createIndirectCommandsLayoutNV: Optional allocator converts to a possibly-null pointer; returns the created handle after resultCheck.
) ) ); 20845 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); 20846 20847 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); 20848 } 20849 20850 # ifndef VULKAN_HPP_NO_SMART_HANDLE 20851 template <typename Dispatch> 20852 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20853 Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 20854 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20855 Dispatch const & d ) const 20856 { 20857 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20858 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20859 VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 20860 # endif 20861 20862 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 20863 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 20864 m_device, 20865 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 20866 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20867 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 20868 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); 20869 20870 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, 20871
// (continuation) Unique variant: wraps the handle in a UniqueHandle whose ObjectDestroy deleter captures this device, the allocator, and the dispatcher.
UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( 20872 indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 20873 } 20874 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 20875 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20876 20877 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const20878 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 20879 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20880 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20881 { 20882 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20883 d.vkDestroyIndirectCommandsLayoutNV( 20884 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 20885 } 20886 20887 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20888 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20889 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 20890 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20891 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20892 { 20893 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20894 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20895 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 20896 # endif 20897 20898 d.vkDestroyIndirectCommandsLayoutNV( 20899 m_device, 20900
// (continuation) destroyIndirectCommandsLayoutNV (enhanced); the Device::destroy overloads below hit the same C entry point and exist for UniqueHandle deleters.
static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 20901 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 20902 } 20903 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20904 20905 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const20906 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 20907 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20908 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20909 { 20910 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20911 d.vkDestroyIndirectCommandsLayoutNV( 20912 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 20913 } 20914 20915 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20916 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20917 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 20918 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20919 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20920 { 20921 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20922 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20923 VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); 20924 # endif 20925 20926 d.vkDestroyIndirectCommandsLayoutNV( 20927 m_device, 20928 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 20929 reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 20930 } 20931 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20932 20933 //=== VK_EXT_depth_bias_control === 20934 20935 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,Dispatch const & d) const20936 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, 20937 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20938 { 20939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20940 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); 20941 } 20942 20943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20944 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,Dispatch const & d) const20945 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, 20946 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20947 { 20948 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20949 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20950 VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" ); 20951 # endif 20952 20953 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) ); 20954 } 20955 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20956 20957 //=== VK_EXT_acquire_drm_display === 20958 20959 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20960 template <typename Dispatch> acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const20961 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, 20962 VULKAN_HPP_NAMESPACE::DisplayKHR display, 20963 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20964 { 20965
// (continuation) VK_EXT_acquire_drm_display: acquireDrmDisplayEXT forwards (drmFd, display) to the C entry point — per the extension name this acquires the display from a DRM fd; note this group uses #ifdef/#else (only one overload exists per build), unlike the #ifndef pattern above.
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20966 return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 20967 } 20968 #else 20969 template <typename Dispatch> 20970 VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const20971 PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 20972 { 20973 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20974 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 20975 VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 20976 # endif 20977 20978 VULKAN_HPP_NAMESPACE::Result result = 20979 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 20980 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); 20981 20982 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 20983 } 20984 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20985 20986 template <typename Dispatch> getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,VULKAN_HPP_NAMESPACE::DisplayKHR * display,Dispatch const & d) const20987 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, 20988 uint32_t connectorId, 20989 VULKAN_HPP_NAMESPACE::DisplayKHR * display, 20990 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20991 { 20992 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20993 return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); 20994 } 20995 20996 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20997 template <typename Dispatch> 20998
// (continuation) getDrmDisplayEXT (enhanced): returns the DisplayKHR for (drmFd, connectorId) after resultCheck.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const20999 PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 21000 { 21001 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21002 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21003 VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 21004 # endif 21005 21006 VULKAN_HPP_NAMESPACE::DisplayKHR display; 21007 VULKAN_HPP_NAMESPACE::Result result = 21008 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 21009 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); 21010 21011 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 21012 } 21013 21014 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21015 template <typename Dispatch> 21016 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getDrmDisplayEXTUnique(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const21017 PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 21018 { 21019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21020 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21021 VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); 21022 # endif 21023 21024 VULKAN_HPP_NAMESPACE::DisplayKHR display; 21025 VULKAN_HPP_NAMESPACE::Result result = 21026 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 21027
// (continuation) getDrmDisplayEXTUnique: the UniqueHandle deleter is ObjectRelease (not ObjectDestroy) bound to this physical device.
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); 21028 21029 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21030 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 21031 } 21032 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21033 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21034 21035 //=== VK_EXT_private_data === 21036 21037 template <typename Dispatch> createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const21038 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 21039 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21040 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 21041 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21042 { 21043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21044 return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, 21045 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 21046 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21047 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 21048 } 21049 21050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21051 template <typename Dispatch> 21052 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21053 Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 21054 Optional<const
// (continuation) VK_EXT_private_data: per the assert strings, these EXT aliases also satisfy core VK_VERSION_1_3 (vkCreatePrivateDataSlot promotion).
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21055 Dispatch const & d ) const 21056 { 21057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21059 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 21060 # endif 21061 21062 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 21063 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 21064 m_device, 21065 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 21066 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21067 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 21068 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); 21069 21070 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); 21071 } 21072 21073 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21074 template <typename Dispatch> 21075 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotEXTUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21076 Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 21077 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21078 Dispatch const & d ) const 21079 { 21080 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21081 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21082 VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires
<VK_EXT_private_data> or <VK_VERSION_1_3>" ); 21083 # endif 21084 21085 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 21086 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 21087 m_device, 21088 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 21089 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21090 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 21091 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); 21092 21093 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21094 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 21095 } 21096 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21098 21099 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21100 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21101 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21102 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21103 { 21104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21105 d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21106 } 21107 21108 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21109 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21110 VULKAN_HPP_INLINE void
// (continuation) destroyPrivateDataSlotEXT (enhanced) follows; the trailing setPrivateDataEXT enhanced overload is cut off at this chunk boundary and continues past the end of this view.
Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21111 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21112 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21113 { 21114 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21115 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21116 VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 21117 # endif 21118 21119 d.vkDestroyPrivateDataSlotEXT( 21120 m_device, 21121 static_cast<VkPrivateDataSlot>( privateDataSlot ), 21122 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21123 } 21124 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21125 21126 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 21127 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const21128 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 21129 uint64_t objectHandle, 21130 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21131 uint64_t data, 21132 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21133 { 21134 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21135 return static_cast<Result>( 21136 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 21137 } 21138 #else 21139 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const21140 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT(
VULKAN_HPP_NAMESPACE::ObjectType objectType_, 21141 uint64_t objectHandle, 21142 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21143 uint64_t data, 21144 Dispatch const & d ) const 21145 { 21146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21147 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21148 VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 21149 # endif 21150 21151 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21152 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 21153 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); 21154 21155 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21156 } 21157 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 21158 21159 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const21160 VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 21161 uint64_t objectHandle, 21162 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21163 uint64_t * pData, 21164 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21165 { 21166 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21167 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 21168 } 21169 21170 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21171 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const21172 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 21173 uint64_t objectHandle, 21174 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 21175 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21176 { 21177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21178 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21179 VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); 21180 # endif 21181 21182 uint64_t data; 21183 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 21184 21185 return data; 21186 } 21187 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21188 21189 //=== VK_KHR_video_encode_queue === 21190 21191 template <typename Dispatch> 21192 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,Dispatch const & d) const21193 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, 21194 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, 21195 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21196 { 21197 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21198 return static_cast<Result>( 21199 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 21200 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ), 21201 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) ); 21202 } 21203 21204 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21205 template <typename Dispatch> 21206 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const21207 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 21208 Dispatch const & d ) const 21209 { 21210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21211 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21212 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && 21213 "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); 21214 # endif 21215 21216 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; 21217 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21218 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 21219 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 21220 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 21221 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 21222 21223 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); 21224 } 21225 21226 template <typename X, typename Y, typename... 
Z, typename Dispatch> 21227 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const21228 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 21229 Dispatch const & d ) const 21230 { 21231 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21232 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21233 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && 21234 "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); 21235 # endif 21236 21237 StructureChain<X, Y, Z...> structureChain; 21238 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = 21239 structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>(); 21240 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21241 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 21242 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 21243 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 21244 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 21245 21246 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 21247 } 21248 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21249 21250 template <typename Dispatch> 21251 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * 
pVideoSessionParametersInfo,VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,size_t * pDataSize,void * pData,Dispatch const & d) const21252 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, 21253 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, 21254 size_t * pDataSize, 21255 void * pData, 21256 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21257 { 21258 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21259 return static_cast<Result>( 21260 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21261 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ), 21262 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ), 21263 pDataSize, 21264 pData ) ); 21265 } 21266 21267 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21268 template <typename Uint8_tAllocator, typename Dispatch> 21269 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21270 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Dispatch const & d) const21271 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 21272 Dispatch const & d ) const 21273 { 21274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21275 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21276 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 21277 # endif 21278 21279 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, 
Uint8_tAllocator>> data_; 21280 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; 21281 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 21282 size_t dataSize; 21283 VULKAN_HPP_NAMESPACE::Result result; 21284 do 21285 { 21286 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21287 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21288 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21289 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21290 &dataSize, 21291 nullptr ) ); 21292 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 21293 { 21294 data.resize( dataSize ); 21295 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21296 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21297 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21298 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21299 &dataSize, 21300 reinterpret_cast<void *>( data.data() ) ) ); 21301 } 21302 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21303 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 21304 21305 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 21306 } 21307 21308 template <typename Uint8_tAllocator, 21309 typename Dispatch, 21310 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 21311 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21312 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Uint8_tAllocator & 
uint8_tAllocator,Dispatch const & d) const21313 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 21314 Uint8_tAllocator & uint8_tAllocator, 21315 Dispatch const & d ) const 21316 { 21317 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21318 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21319 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 21320 # endif 21321 21322 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( 21323 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 21324 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; 21325 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 21326 size_t dataSize; 21327 VULKAN_HPP_NAMESPACE::Result result; 21328 do 21329 { 21330 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21331 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21332 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21333 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21334 &dataSize, 21335 nullptr ) ); 21336 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 21337 { 21338 data.resize( dataSize ); 21339 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21340 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21341 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21342 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21343 &dataSize, 21344 reinterpret_cast<void *>( data.data() ) ) ); 21345 } 21346 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21347 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 21348 21349 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 21350 } 21351 21352 template <typename X, typename Y, typename... Z, typename Uint8_tAllocator, typename Dispatch> 21353 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21354 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Dispatch const & d) const21355 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 21356 Dispatch const & d ) const 21357 { 21358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21359 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21360 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 21361 # endif 21362 21363 std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_; 21364 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = 21365 data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>(); 21366 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 21367 size_t dataSize; 21368 VULKAN_HPP_NAMESPACE::Result result; 21369 do 21370 { 21371 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21372 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21373 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21374 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21375 &dataSize, 21376 nullptr ) ); 21377 if ( ( 
result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 21378 { 21379 data.resize( dataSize ); 21380 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21381 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21382 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21383 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21384 &dataSize, 21385 reinterpret_cast<void *>( data.data() ) ) ); 21386 } 21387 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21388 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 21389 21390 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 21391 } 21392 21393 template <typename X, 21394 typename Y, 21395 typename... Z, 21396 typename Uint8_tAllocator, 21397 typename Dispatch, 21398 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 21399 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 21400 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const21401 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 21402 Uint8_tAllocator & uint8_tAllocator, 21403 Dispatch const & d ) const 21404 { 21405 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21406 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21407 VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); 21408 # endif 21409 21410 
std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_( 21411 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 21412 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = 21413 data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>(); 21414 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 21415 size_t dataSize; 21416 VULKAN_HPP_NAMESPACE::Result result; 21417 do 21418 { 21419 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21420 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21421 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21422 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21423 &dataSize, 21424 nullptr ) ); 21425 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 21426 { 21427 data.resize( dataSize ); 21428 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21429 d.vkGetEncodedVideoSessionParametersKHR( m_device, 21430 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 21431 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 21432 &dataSize, 21433 reinterpret_cast<void *>( data.data() ) ) ); 21434 } 21435 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21436 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 21437 21438 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 21439 } 21440 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21441 21442 template <typename Dispatch> encodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,Dispatch const & d) const21443 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const 
VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, 21444 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21445 { 21446 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21447 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); 21448 } 21449 21450 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21451 template <typename Dispatch> encodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,Dispatch const & d) const21452 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, 21453 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21454 { 21455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21456 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21457 VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires <VK_KHR_video_encode_queue>" ); 21458 # endif 21459 21460 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) ); 21461 } 21462 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21463 21464 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 21465 //=== VK_NV_cuda_kernel_launch === 21466 21467 template <typename Dispatch> createCudaModuleNV(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,Dispatch const & d) const21468 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, 21469 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21470 VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, 21471 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21472 { 21473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21474 return static_cast<Result>( d.vkCreateCudaModuleNV( m_device, 21475 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( 
pCreateInfo ), 21476 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21477 reinterpret_cast<VkCudaModuleNV *>( pModule ) ) ); 21478 } 21479 21480 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21481 template <typename Dispatch> 21482 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type createCudaModuleNV(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21483 Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, 21484 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21485 Dispatch const & d ) const 21486 { 21487 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21488 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21489 VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 21490 # endif 21491 21492 VULKAN_HPP_NAMESPACE::CudaModuleNV module; 21493 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21494 d.vkCreateCudaModuleNV( m_device, 21495 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), 21496 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21497 reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); 21498 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); 21499 21500 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); 21501 } 21502 21503 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21504 template <typename Dispatch> 21505 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type createCudaModuleNVUnique(const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & 
createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21506 Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, 21507 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21508 Dispatch const & d ) const 21509 { 21510 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21511 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21512 VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 21513 # endif 21514 21515 VULKAN_HPP_NAMESPACE::CudaModuleNV module; 21516 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21517 d.vkCreateCudaModuleNV( m_device, 21518 reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), 21519 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21520 reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); 21521 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); 21522 21523 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21524 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 21525 } 21526 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21527 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21528 21529 template <typename Dispatch> getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,size_t * pCacheSize,void * pCacheData,Dispatch const & d) const21530 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 21531 size_t * pCacheSize, 21532 void * pCacheData, 21533 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21534 { 21535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21536 return static_cast<Result>( 
d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) ); 21537 } 21538 21539 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21540 template <typename Uint8_tAllocator, typename Dispatch> 21541 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Dispatch const & d) const21542 Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const 21543 { 21544 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21545 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21546 VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" ); 21547 # endif 21548 21549 std::vector<uint8_t, Uint8_tAllocator> cacheData; 21550 size_t cacheSize; 21551 VULKAN_HPP_NAMESPACE::Result result; 21552 do 21553 { 21554 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); 21555 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) 21556 { 21557 cacheData.resize( cacheSize ); 21558 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21559 d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); 21560 } 21561 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21562 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); 21563 VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); 21564 if ( cacheSize < cacheData.size() ) 21565 { 21566 cacheData.resize( cacheSize ); 21567 } 21568 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); 21569 } 21570 21571 template <typename Uint8_tAllocator, 21572 typename Dispatch, 21573 typename 
std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 21574 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const21575 Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 21576 { 21577 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21578 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21579 VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" ); 21580 # endif 21581 21582 std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator ); 21583 size_t cacheSize; 21584 VULKAN_HPP_NAMESPACE::Result result; 21585 do 21586 { 21587 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); 21588 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) 21589 { 21590 cacheData.resize( cacheSize ); 21591 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21592 d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); 21593 } 21594 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 21595 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); 21596 VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); 21597 if ( cacheSize < cacheData.size() ) 21598 { 21599 cacheData.resize( cacheSize ); 21600 } 21601 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); 21602 } 21603 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21604 21605 template <typename Dispatch> createCudaFunctionNV(const 
VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,Dispatch const & d) const21606 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, 21607 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21608 VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, 21609 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21610 { 21611 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21612 return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device, 21613 reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ), 21614 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21615 reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) ); 21616 } 21617 21618 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21619 template <typename Dispatch> 21620 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type createCudaFunctionNV(const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21621 Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, 21622 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21623 Dispatch const & d ) const 21624 { 21625 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21626 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21627 VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 21628 # endif 21629 21630 VULKAN_HPP_NAMESPACE::CudaFunctionNV function; 21631 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21632 d.vkCreateCudaFunctionNV( m_device, 21633 reinterpret_cast<const 
VkCudaFunctionCreateInfoNV *>( &createInfo ), 21634 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21635 reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); 21636 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); 21637 21638 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); 21639 } 21640 21641 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21642 template <typename Dispatch> 21643 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type createCudaFunctionNVUnique(const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21644 Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, 21645 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21646 Dispatch const & d ) const 21647 { 21648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21650 VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 21651 # endif 21652 21653 VULKAN_HPP_NAMESPACE::CudaFunctionNV function; 21654 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21655 d.vkCreateCudaFunctionNV( m_device, 21656 reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ), 21657 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21658 reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); 21659 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); 21660 21661 return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( 21662 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 21663 } 21664 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21665 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21666 21667 template <typename Dispatch> destroyCudaModuleNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21668 VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 21669 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21670 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21671 { 21672 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21673 d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21674 } 21675 21676 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21677 template <typename Dispatch> destroyCudaModuleNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21678 VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 21679 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21680 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21681 { 21682 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21683 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21684 VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 21685 # endif 21686 21687 d.vkDestroyCudaModuleNV( m_device, 21688 static_cast<VkCudaModuleNV>( module ), 21689 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21690 } 21691 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21692 21693 
template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21694 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 21695 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21696 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21697 { 21698 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21699 d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21700 } 21701 21702 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21703 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaModuleNV module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21704 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, 21705 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21707 { 21708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21709 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21710 VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); 21711 # endif 21712 21713 d.vkDestroyCudaModuleNV( m_device, 21714 static_cast<VkCudaModuleNV>( module ), 21715 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21716 } 21717 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21718 21719 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21720 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 21721 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21722 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21723 { 21724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21725 d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21726 } 21727 21728 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21729 template <typename Dispatch> destroyCudaFunctionNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21730 VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 21731 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21732 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21733 { 21734 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21735 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21736 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 21737 # endif 21738 21739 d.vkDestroyCudaFunctionNV( m_device, 21740 static_cast<VkCudaFunctionNV>( function ), 21741 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21742 } 21743 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21744 21745 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21746 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 21747 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21748 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21749 { 21750 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21751 d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21752 } 21753 21754 # ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 21755 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CudaFunctionNV function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21756 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, 21757 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21759 { 21760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21761 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21762 VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); 21763 # endif 21764 21765 d.vkDestroyCudaFunctionNV( m_device, 21766 static_cast<VkCudaFunctionNV>( function ), 21767 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21768 } 21769 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21770 21771 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,Dispatch const & d) const21772 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, 21773 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21774 { 21775 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21776 d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); 21777 } 21778 21779 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21780 template <typename Dispatch> cudaLaunchKernelNV(const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,Dispatch const & d) const21781 VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, 21782 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21783 { 21784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21785 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21786 VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" ); 21787 # endif 21788 21789 d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) ); 21790 } 21791 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21792 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 21793 21794 #if defined( VK_USE_PLATFORM_METAL_EXT ) 21795 //=== VK_EXT_metal_objects === 21796 21797 template <typename Dispatch> exportMetalObjectsEXT(VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,Dispatch const & d) const21798 VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, 21799 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21800 { 21801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21802 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); 21803 } 21804 21805 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21806 template <typename Dispatch> 21807 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT(Dispatch const & d) const21808 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21809 { 21810 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21811 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21812 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 21813 # endif 21814 21815 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; 21816 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 21817 21818 return metalObjectsInfo; 21819 } 21820 21821 template <typename X, typename Y, typename... 
Z, typename Dispatch> 21822 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> exportMetalObjectsEXT(Dispatch const & d) const21823 Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21824 { 21825 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21826 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21827 VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); 21828 # endif 21829 21830 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21831 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>(); 21832 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); 21833 21834 return structureChain; 21835 } 21836 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21837 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 21838 21839 //=== VK_KHR_synchronization2 === 21840 21841 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const21842 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 21843 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 21844 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21845 { 21846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21847 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 21848 } 21849 21850 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21851 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const21852 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 21853 const 
VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 21854 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21855 { 21856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21857 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21858 VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 21859 # endif 21860 21861 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 21862 } 21863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21864 21865 template <typename Dispatch> resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const21866 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 21867 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 21868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21869 { 21870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21871 d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 21872 } 21873 21874 template <typename Dispatch> waitEvents2KHR(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const21875 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, 21876 const VULKAN_HPP_NAMESPACE::Event * pEvents, 21877 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 21878 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21879 { 21880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21881 d.vkCmdWaitEvents2KHR( 21882 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 21883 } 21884 21885 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21886 template <typename 
Dispatch> waitEvents2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const21887 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 21888 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 21889 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 21890 { 21891 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21892 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21893 VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 21894 # endif 21895 # ifdef VULKAN_HPP_NO_EXCEPTIONS 21896 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 21897 # else 21898 if ( events.size() != dependencyInfos.size() ) 21899 { 21900 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); 21901 } 21902 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 21903 21904 d.vkCmdWaitEvents2KHR( m_commandBuffer, 21905 events.size(), 21906 reinterpret_cast<const VkEvent *>( events.data() ), 21907 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 21908 } 21909 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21910 21911 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const21912 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 21913 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21914 { 21915 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21916 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) 
); 21917 } 21918 21919 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21920 template <typename Dispatch> pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const21921 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 21922 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21923 { 21924 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21925 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21926 VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 21927 # endif 21928 21929 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 21930 } 21931 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21932 21933 template <typename Dispatch> writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const21934 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 21935 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 21936 uint32_t query, 21937 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21938 { 21939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21940 d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 21941 } 21942 21943 template <typename Dispatch> submit2KHR(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const21944 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, 21945 const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 21946 VULKAN_HPP_NAMESPACE::Fence fence, 21947 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21948 { 21949 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21950 return static_cast<Result>( 21951 d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 21952 } 21953 21954 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21955 template <typename Dispatch> submit2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const21956 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( 21957 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 21958 { 21959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21960 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 21961 VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); 21962 # endif 21963 21964 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21965 d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 21966 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); 21967 21968 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 21969 } 21970 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21971 21972 template <typename Dispatch> writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const21973 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 21974 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 21975 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 21976 uint32_t marker, 
21977 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21978 { 21979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21980 d.vkCmdWriteBufferMarker2AMD( 21981 m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker ); 21982 } 21983 21984 template <typename Dispatch> getCheckpointData2NV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,Dispatch const & d) const21985 VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, 21986 VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, 21987 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21988 { 21989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21990 d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); 21991 } 21992 21993 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21994 template <typename CheckpointData2NVAllocator, typename Dispatch> 21995 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(Dispatch const & d) const21996 Queue::getCheckpointData2NV( Dispatch const & d ) const 21997 { 21998 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21999 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22000 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" ); 22001 # endif 22002 22003 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData; 22004 uint32_t checkpointDataCount; 22005 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 22006 checkpointData.resize( checkpointDataCount ); 22007 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 22008 
22009 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 22010 if ( checkpointDataCount < checkpointData.size() ) 22011 { 22012 checkpointData.resize( checkpointDataCount ); 22013 } 22014 return checkpointData; 22015 } 22016 22017 template <typename CheckpointData2NVAllocator, 22018 typename Dispatch, 22019 typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type> 22020 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(CheckpointData2NVAllocator & checkpointData2NVAllocator,Dispatch const & d) const22021 Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const 22022 { 22023 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22024 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22025 VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" ); 22026 # endif 22027 22028 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator ); 22029 uint32_t checkpointDataCount; 22030 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 22031 checkpointData.resize( checkpointDataCount ); 22032 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 22033 22034 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 22035 if ( checkpointDataCount < checkpointData.size() ) 22036 { 22037 checkpointData.resize( checkpointDataCount ); 22038 } 22039 return checkpointData; 22040 } 22041 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22042 22043 //=== VK_EXT_descriptor_buffer === 22044 22045 template <typename Dispatch> getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout 
layout,VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,Dispatch const & d) const22046 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 22047 VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, 22048 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22049 { 22050 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22051 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); 22052 } 22053 22054 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22055 template <typename Dispatch> 22056 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,Dispatch const & d) const22057 Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22058 { 22059 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22060 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22061 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" ); 22062 # endif 22063 22064 VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; 22065 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) ); 22066 22067 return layoutSizeInBytes; 22068 } 22069 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22070 22071 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,Dispatch const & d) const22072 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 22073 uint32_t binding, 22074 VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, 
22075 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22076 { 22077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22078 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); 22079 } 22080 22081 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22082 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,Dispatch const & d) const22083 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( 22084 VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22085 { 22086 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22087 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22088 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && 22089 "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" ); 22090 # endif 22091 22092 VULKAN_HPP_NAMESPACE::DeviceSize offset; 22093 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) ); 22094 22095 return offset; 22096 } 22097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22098 22099 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const22100 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, 22101 size_t dataSize, 22102 void * pDescriptor, 22103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22104 { 22105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22106 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, 
pDescriptor ); 22107 } 22108 22109 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22110 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const22111 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 22112 size_t dataSize, 22113 void * pDescriptor, 22114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22115 { 22116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22118 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 22119 # endif 22120 22121 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor ); 22122 } 22123 22124 template <typename DescriptorType, typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,Dispatch const & d) const22125 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 22126 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22127 { 22128 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22129 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22130 VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); 22131 # endif 22132 22133 DescriptorType descriptor; 22134 d.vkGetDescriptorEXT( 22135 m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) ); 22136 22137 return descriptor; 22138 } 22139 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22140 22141 template <typename Dispatch> bindDescriptorBuffersEXT(uint32_t bufferCount,const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT 
* pBindingInfos,Dispatch const & d) const22142 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, 22143 const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, 22144 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22145 { 22146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22147 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); 22148 } 22149 22150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22151 template <typename Dispatch> 22152 VULKAN_HPP_INLINE void bindDescriptorBuffersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,Dispatch const & d) const22153 CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, 22154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22155 { 22156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22157 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22158 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" ); 22159 # endif 22160 22161 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) ); 22162 } 22163 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22164 22165 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t setCount,const uint32_t * pBufferIndices,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const22166 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22167 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22168 uint32_t firstSet, 22169 uint32_t setCount, 22170 const uint32_t * pBufferIndices, 22171 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 22172 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22173 { 22174 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22175 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 22176 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 22177 static_cast<VkPipelineLayout>( layout ), 22178 firstSet, 22179 setCount, 22180 pBufferIndices, 22181 reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 22182 } 22183 22184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22185 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const22186 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22187 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22188 uint32_t firstSet, 22189 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, 22190 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 22191 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 22192 { 22193 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22194 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22195 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" ); 22196 # endif 22197 # ifdef VULKAN_HPP_NO_EXCEPTIONS 22198 VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); 22199 # else 22200 if ( bufferIndices.size() != offsets.size() ) 22201 { 22202 throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); 22203 } 22204 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 22205 22206 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 22207 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 22208 static_cast<VkPipelineLayout>( layout ), 22209 firstSet, 22210 bufferIndices.size(), 22211 bufferIndices.data(), 22212 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 22213 } 22214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22215 22216 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,Dispatch const & d) const22217 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22218 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 22219 uint32_t set, 22220 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22221 { 22222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22223 d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( 22224 m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); 22225 } 22226 22227 template <typename Dispatch> getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22228 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( 22229 const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22230 { 22231 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22232 return static_cast<Result>( 22233 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22234 } 
22235 22236 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22237 template <typename DataType, typename Dispatch> 22238 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22239 Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22240 { 22241 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22242 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22243 VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT && 22244 "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22245 # endif 22246 22247 DataType data; 22248 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22249 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22250 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); 22251 22252 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22253 } 22254 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22255 22256 template <typename Dispatch> getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22257 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( 22258 const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22259 { 22260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22261 return static_cast<Result>( 22262 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const 
VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22263 } 22264 22265 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22266 template <typename DataType, typename Dispatch> 22267 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22268 Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22269 { 22270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22271 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22272 VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && 22273 "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22274 # endif 22275 22276 DataType data; 22277 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22278 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22279 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); 22280 22281 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22282 } 22283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22284 22285 template <typename Dispatch> getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22286 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( 22287 const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22288 { 22289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22290 return static_cast<Result>( 22291 
d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22292 } 22293 22294 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22295 template <typename DataType, typename Dispatch> 22296 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22297 Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22298 { 22299 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22300 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22301 VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && 22302 "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22303 # endif 22304 22305 DataType data; 22306 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22307 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22308 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); 22309 22310 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22311 } 22312 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22313 22314 template <typename Dispatch> getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22315 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( 22316 const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22317 { 22318 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22319 return static_cast<Result>( 22320 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22321 } 22322 22323 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22324 template <typename DataType, typename Dispatch> 22325 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22326 Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 22327 { 22328 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22329 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22330 VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && 22331 "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22332 # endif 22333 22334 DataType data; 22335 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22336 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22337 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); 22338 22339 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22340 } 22341 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22342 22343 template <typename Dispatch> getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const22344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22345 const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22346 { 22347 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22348 return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22349 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 22350 } 22351 22352 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22353 template <typename DataType, typename Dispatch> 22354 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const22355 Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, 22356 Dispatch const & d ) const 22357 { 22358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22359 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22360 VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && 22361 "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); 22362 # endif 22363 22364 DataType data; 22365 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 22366 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 22367 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); 22368 22369 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 22370 } 22371 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22372 22373 //=== VK_NV_fragment_shading_rate_enums === 
22374 22375 template <typename Dispatch> setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const22376 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, 22377 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 22378 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22379 { 22380 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22381 d.vkCmdSetFragmentShadingRateEnumNV( 22382 m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 22383 } 22384 22385 //=== VK_EXT_mesh_shader === 22386 22387 template <typename Dispatch> 22388 VULKAN_HPP_INLINE void drawMeshTasksEXT(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const22389 CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22390 { 22391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22392 d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 22393 } 22394 22395 template <typename Dispatch> drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const22396 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 22397 VULKAN_HPP_NAMESPACE::DeviceSize offset, 22398 uint32_t drawCount, 22399 uint32_t stride, 22400 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22401 { 22402 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22403 d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, 
stride ); 22404 } 22405 22406 template <typename Dispatch> drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const22407 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 22408 VULKAN_HPP_NAMESPACE::DeviceSize offset, 22409 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 22410 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 22411 uint32_t maxDrawCount, 22412 uint32_t stride, 22413 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22414 { 22415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22416 d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, 22417 static_cast<VkBuffer>( buffer ), 22418 static_cast<VkDeviceSize>( offset ), 22419 static_cast<VkBuffer>( countBuffer ), 22420 static_cast<VkDeviceSize>( countBufferOffset ), 22421 maxDrawCount, 22422 stride ); 22423 } 22424 22425 //=== VK_KHR_copy_commands2 === 22426 22427 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const22428 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 22429 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22430 { 22431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22432 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 22433 } 22434 22435 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22436 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const22437 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 22438 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22439 { 22440 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22441 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22442 VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22443 # endif 22444 22445 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 22446 } 22447 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22448 22449 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const22450 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, 22451 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22452 { 22453 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22454 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 22455 } 22456 22457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22458 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const22459 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, 22460 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22461 { 22462 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22463 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22464 VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22465 # endif 22466 22467 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 22468 } 22469 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22470 22471 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const22472 VULKAN_HPP_INLINE void 
CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 22473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22474 { 22475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22476 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 22477 } 22478 22479 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22480 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const22481 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 22482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22483 { 22484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22485 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22486 VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22487 # endif 22488 22489 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 22490 } 22491 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22492 22493 template <typename Dispatch> copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const22494 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 22495 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22496 { 22497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22498 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 22499 } 22500 22501 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22502 template <typename Dispatch> copyImageToBuffer2KHR(const 
VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const22503 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 22504 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22505 { 22506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22507 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22508 VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22509 # endif 22510 22511 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 22512 } 22513 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22514 22515 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const22516 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, 22517 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22518 { 22519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22520 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 22521 } 22522 22523 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22524 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const22525 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, 22526 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22527 { 22528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22529 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22530 VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22531 # endif 22532 22533 d.vkCmdBlitImage2KHR( m_commandBuffer, 
reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 22534 } 22535 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22536 22537 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const22538 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 22539 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22540 { 22541 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22542 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 22543 } 22544 22545 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22546 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const22547 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 22548 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22549 { 22550 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22551 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22552 VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); 22553 # endif 22554 22555 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 22556 } 22557 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22558 22559 //=== VK_EXT_device_fault === 22560 22561 template <typename Dispatch> getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,Dispatch const & d) const22562 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, 22563 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, 22564 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
22565 { 22566 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22567 return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( 22568 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); 22569 } 22570 22571 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22572 template <typename Dispatch> 22573 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22574 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>::type getFaultInfoEXT(Dispatch const & d) const22575 Device::getFaultInfoEXT( Dispatch const & d ) const 22576 { 22577 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22578 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22579 VULKAN_HPP_ASSERT( d.vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> requires <VK_EXT_device_fault>" ); 22580 # endif 22581 22582 std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data_; 22583 VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; 22584 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; 22585 VULKAN_HPP_NAMESPACE::Result result; 22586 do 22587 { 22588 result = 22589 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), nullptr ) ); 22590 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 22591 { 22592 std::free( faultInfo.pAddressInfos ); 22593 if ( faultCounts.addressInfoCount ) 22594 { 22595 faultInfo.pAddressInfos = reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT *>( 22596 std::malloc( faultCounts.addressInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) ) ); 22597 } 22598 std::free( faultInfo.pVendorInfos ); 22599 if ( faultCounts.vendorInfoCount ) 22600 { 22601 faultInfo.pVendorInfos = reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT 
// NOTE(review): this region of the generated vulkan_funcs.hpp has been mangled by an
// extraction tool — original line numbers (22602, …) and duplicated tag-style signature
// fragments are fused into the token stream. Code tokens below are kept byte-identical;
// only comment lines are added. Do not hand-edit the logic: this file is generated from
// the Khronos Vulkan XML registry and would be overwritten on regeneration.
//
// Contents of this span:
//  * Tail of Device::getFaultInfoEXT (definition begins before this chunk): the visible
//    part re-mallocs faultInfo.pVendorBinaryData when vendorBinarySize is non-zero,
//    re-queries vkGetDeviceFaultInfoEXT, and loops while the result is eIncomplete
//    (classic Vulkan two-call size/fill protocol); resultCheck accepts both eSuccess and
//    eIncomplete as non-throwing outcomes.
//  * VK_NV_acquire_winrt_display (inside VK_USE_PLATFORM_WIN32_KHR):
//    PhysicalDevice::acquireWinrtDisplayNV — raw form returns Result directly; enhanced
//    form maps VkResult through resultCheck/createResultValueType.
*>( 22602 std::malloc( faultCounts.vendorInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) ) ); 22603 } 22604 std::free( faultInfo.pVendorBinaryData ); 22605 if ( faultCounts.vendorBinarySize ) 22606 { 22607 faultInfo.pVendorBinaryData = std::malloc( faultCounts.vendorBinarySize ); 22608 } 22609 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceFaultInfoEXT( 22610 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) ) ); 22611 } 22612 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22613 VULKAN_HPP_NAMESPACE::detail::resultCheck( 22614 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); 22615 22616 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 22617 } 22618 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22619 22620 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 22621 //=== VK_NV_acquire_winrt_display === 22622 22623 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 22624 template <typename Dispatch> acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const22625 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, 22626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22627 { 22628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22629 return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 22630 } 22631 # else 22632 template <typename Dispatch> 22633 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const22634 PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 
// Enhanced acquireWinrtDisplayNV body: when the dynamic dispatch loader is in use it
// asserts vkAcquireWinrtDisplayNV was resolved, then calls it and routes the VkResult
// through resultCheck (throws on failure unless exceptions are disabled).
// PhysicalDevice::getWinrtDisplayNV follows: raw-pointer overload writes into *pDisplay;
// the enhanced overload (started at the end of this line) returns the DisplayKHR by value.
22635 { 22636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22637 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22638 VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 22639 # endif 22640 22641 VULKAN_HPP_NAMESPACE::Result result = 22642 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 22643 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); 22644 22645 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22646 } 22647 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 22648 22649 template <typename Dispatch> getWinrtDisplayNV(uint32_t deviceRelativeId,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const22650 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, 22651 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 22652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22653 { 22654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22655 return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 22656 } 22657 22658 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22659 template <typename Dispatch> 22660 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV(uint32_t deviceRelativeId,Dispatch const & d) const22661 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const 22662 { 22663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22664 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22665 VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 22666 # endif 22667 22668 
// Enhanced getWinrtDisplayNV returns the acquired DisplayKHR by value via
// createResultValueType; getWinrtDisplayNVUnique (smart-handle form, gated on
// VULKAN_HPP_NO_SMART_HANDLE) wraps the same result in a UniqueHandle whose deleter is
// ObjectRelease<PhysicalDevice> — i.e. the display is released, not destroyed, on scope
// exit. The Win32 section closes here and VK_EXT_directfb_surface begins.
VULKAN_HPP_NAMESPACE::DisplayKHR display; 22669 VULKAN_HPP_NAMESPACE::Result result = 22670 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 22671 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); 22672 22673 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); 22674 } 22675 22676 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22677 template <typename Dispatch> 22678 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique(uint32_t deviceRelativeId,Dispatch const & d) const22679 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const 22680 { 22681 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22682 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22683 VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); 22684 # endif 22685 22686 VULKAN_HPP_NAMESPACE::DisplayKHR display; 22687 VULKAN_HPP_NAMESPACE::Result result = 22688 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 22689 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); 22690 22691 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22692 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 22693 } 22694 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22695 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22696 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 22697 22698 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) 22699 //=== VK_EXT_directfb_surface === 22700
// VK_EXT_directfb_surface (compiled only under VK_USE_PLATFORM_DIRECTFB_EXT):
//  * Instance::createDirectFBSurfaceEXT — raw-pointer form writes into *pSurface and
//    returns the raw Result; enhanced form takes a createInfo reference plus an Optional
//    allocator and returns the SurfaceKHR by value; the Unique form wraps it in a
//    UniqueHandle with ObjectDestroy<Instance> so the surface is destroyed on scope exit.
//  * PhysicalDevice::getDirectFBPresentationSupportEXT — queries queue-family presentation
//    support for an IDirectFB connection (pointer and reference overloads).
// NOTE(review): extraction artifacts (fused original line numbers / duplicated tag
// signatures) are intentionally left byte-identical; comments only.
22701 template <typename Dispatch> createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const22702 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, 22703 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22704 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 22705 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22706 { 22707 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22708 return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, 22709 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), 22710 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22711 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 22712 } 22713 22714 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22715 template <typename Dispatch> 22716 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22717 Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 22718 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22719 Dispatch const & d ) const 22720 { 22721 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22722 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22723 VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); 22724 # endif 22725 22726 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 22727 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
// Enhanced-mode call: the Optional allocator is converted to a C allocation-callbacks
// pointer (null when no allocator was supplied); the surface is returned by value after
// resultCheck. The Unique variant below repeats the call and attaches
// ObjectDestroy<Instance> as the UniqueHandle deleter.
d.vkCreateDirectFBSurfaceEXT( 22728 m_instance, 22729 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 22730 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22731 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 22732 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); 22733 22734 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 22735 } 22736 22737 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22738 template <typename Dispatch> 22739 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22740 Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 22741 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22742 Dispatch const & d ) const 22743 { 22744 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22745 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22746 VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); 22747 # endif 22748 22749 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 22750 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT( 22751 m_instance, 22752 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 22753 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22754 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 22755 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
// getDirectFBPresentationSupportEXT returns Bool32 directly (no Result mapping); the
// enhanced overload takes IDirectFB by reference and passes its address through.
VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); 22756 22757 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 22758 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 22759 } 22760 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22761 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22762 22763 template <typename Dispatch> getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB * dfb,Dispatch const & d) const22764 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, 22765 IDirectFB * dfb, 22766 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22767 { 22768 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22769 return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); 22770 } 22771 22772 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22773 template <typename Dispatch> 22774 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB & dfb,Dispatch const & d) const22775 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22776 { 22777 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22778 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22779 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT && 22780 "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" ); 22781 # endif 22782 22783 VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); 22784 22785 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 22786 } 22787 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22788 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ 22789 22790 //=== 
// VK_EXT_vertex_input_dynamic_state:
//  * CommandBuffer::setVertexInputEXT — raw form takes explicit counts + pointers;
//    enhanced form takes two ArrayProxy ranges and forwards .size()/.data(). Per the
//    assert text, vkCmdSetVertexInputEXT may also come from VK_EXT_shader_object.
// Then VK_FUCHSIA_external_memory (Fuchsia-only): Device::getMemoryZirconHandleFUCHSIA
// and Device::getMemoryZirconHandlePropertiesFUCHSIA, each in raw-pointer and enhanced
// forms. NOTE(review): extraction artifacts left byte-identical; comments only.
VK_EXT_vertex_input_dynamic_state === 22791 22792 template <typename Dispatch> setVertexInputEXT(uint32_t vertexBindingDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,uint32_t vertexAttributeDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,Dispatch const & d) const22793 VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, 22794 const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, 22795 uint32_t vertexAttributeDescriptionCount, 22796 const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, 22797 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22798 { 22799 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22800 d.vkCmdSetVertexInputEXT( m_commandBuffer, 22801 vertexBindingDescriptionCount, 22802 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), 22803 vertexAttributeDescriptionCount, 22804 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) ); 22805 } 22806 22807 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22808 template <typename Dispatch> setVertexInputEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,Dispatch const & d) const22809 VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( 22810 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, 22811 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, 22812 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
22813 { 22814 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22815 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22816 VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" ); 22817 # endif 22818 22819 d.vkCmdSetVertexInputEXT( m_commandBuffer, 22820 vertexBindingDescriptions.size(), 22821 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), 22822 vertexAttributeDescriptions.size(), 22823 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) ); 22824 } 22825 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22826 22827 #if defined( VK_USE_PLATFORM_FUCHSIA ) 22828 //=== VK_FUCHSIA_external_memory === 22829 22830 template <typename Dispatch> 22831 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const22832 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 22833 zx_handle_t * pZirconHandle, 22834 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22835 { 22836 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22837 return static_cast<Result>( 22838 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 22839 } 22840 22841 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22842 template <typename Dispatch> 22843 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const22844 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & 
// Enhanced getMemoryZirconHandleFUCHSIA: exports a zx_handle_t for the memory object and
// returns it by value after resultCheck.
getZirconHandleInfo, Dispatch const & d ) const 22845 { 22846 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22847 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22848 VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); 22849 # endif 22850 22851 zx_handle_t zirconHandle; 22852 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22853 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 22854 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); 22855 22856 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); 22857 } 22858 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22859 22860 template <typename Dispatch> 22861 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,Dispatch const & d) const22862 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 22863 zx_handle_t zirconHandle, 22864 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, 22865 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22866 { 22867 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22868 return static_cast<Result>( 22869 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, 22870 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 22871 zirconHandle, 22872 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) ); 22873 } 22874 22875 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22876 
// Enhanced getMemoryZirconHandlePropertiesFUCHSIA: fills a local
// MemoryZirconHandlePropertiesFUCHSIA and returns it by value.
template <typename Dispatch> 22877 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,Dispatch const & d) const22878 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 22879 zx_handle_t zirconHandle, 22880 Dispatch const & d ) const 22881 { 22882 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22883 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22884 VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA && 22885 "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); 22886 # endif 22887 22888 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; 22889 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22890 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, 22891 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 22892 zirconHandle, 22893 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) ); 22894 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); 22895 22896 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); 22897 } 22898 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22899 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 22900 22901 #if defined( VK_USE_PLATFORM_FUCHSIA ) 22902 //=== VK_FUCHSIA_external_semaphore === 22903 22904 template <typename Dispatch> importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,Dispatch const & d) const22905 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
// VK_FUCHSIA_external_semaphore (Fuchsia-only):
//  * Device::importSemaphoreZirconHandleFUCHSIA — imports a zx_handle_t into a semaphore;
//    raw form returns Result, enhanced form returns ResultValueType<void> after
//    resultCheck (no payload).
//  * Device::getSemaphoreZirconHandleFUCHSIA — exports a zx_handle_t from a semaphore;
//    enhanced form returns the handle by value.
// NOTE(review): extraction artifacts (fused line numbers / duplicated tag signatures)
// left byte-identical; comments only.
Device::importSemaphoreZirconHandleFUCHSIA( 22906 const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22907 { 22908 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22909 return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 22910 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); 22911 } 22912 22913 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22914 template <typename Dispatch> 22915 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,Dispatch const & d) const22916 Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, 22917 Dispatch const & d ) const 22918 { 22919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22920 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22921 VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" ); 22922 # endif 22923 22924 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 22925 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) ); 22926 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); 22927 22928 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 22929 } 22930 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22931 22932 template <typename Dispatch> 22933 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSemaphoreZirconHandleFUCHSIA(const 
VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const22934 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 22935 zx_handle_t * pZirconHandle, 22936 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22937 { 22938 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22939 return static_cast<Result>( 22940 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 22941 } 22942 22943 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22944 template <typename Dispatch> 22945 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const22946 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const 22947 { 22948 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22949 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22950 VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" ); 22951 # endif 22952 22953 zx_handle_t zirconHandle; 22954 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22955 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 22956 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); 22957 22958 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); 22959 } 22960 # endif /* 
// VK_FUCHSIA_buffer_collection begins: Device::createBufferCollectionFUCHSIA raw-pointer
// form writes the new handle into *pCollection and returns the raw Result.
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22961 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 22962 22963 #if defined( VK_USE_PLATFORM_FUCHSIA ) 22964 //=== VK_FUCHSIA_buffer_collection === 22965 22966 template <typename Dispatch> 22967 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,Dispatch const & d) const22968 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, 22969 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22970 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, 22971 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22972 { 22973 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22974 return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, 22975 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), 22976 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22977 reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) ); 22978 } 22979 22980 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22981 template <typename Dispatch> 22982 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22983 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 22984 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22985 Dispatch const & d ) const 22986 { 22987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22988 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 22989 
// VK_FUCHSIA_buffer_collection, continued:
//  * Enhanced Device::createBufferCollectionFUCHSIA returns the BufferCollectionFUCHSIA
//    by value; the Unique variant wraps it in a UniqueHandle with ObjectDestroy<Device>
//    (carrying the caller's Optional allocator).
//  * Device::setBufferCollectionImageConstraintsFUCHSIA — applies image constraints to a
//    collection; raw form returns Result, enhanced form ResultValueType<void>.
// NOTE(review): extraction artifacts left byte-identical; comments only.
VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 22990 # endif 22991 22992 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 22993 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 22994 m_device, 22995 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 22996 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22997 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 22998 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); 22999 23000 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) ); 23001 } 23002 23003 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23004 template <typename Dispatch> 23005 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type createBufferCollectionFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23006 Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 23007 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23008 Dispatch const & d ) const 23009 { 23010 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23011 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23012 VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23013 # endif 23014 23015 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 23016 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 23017 m_device, 23018 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 23019 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23020 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 23021 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); 23022 23023 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23024 result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 23025 } 23026 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23027 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23028 23029 template <typename Dispatch> 23030 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,Dispatch const & d) const23031 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23032 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, 23033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23034 { 23035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23036 return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( 23037 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) ); 23038 } 23039 23040 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23041 template <typename Dispatch> 23042 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,Dispatch const & d) const23043 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23044 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, 23045 Dispatch const & d ) const 23046 { 23047 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23048 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23049 VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA && 23050 "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23051 # endif 23052 23053 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( 23054 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) ); 23055 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); 23056 23057 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 23058 } 23059 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23060 23061 template <typename Dispatch> 23062 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,Dispatch const & d) const23063 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23064 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, 23065 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23066 { 23067 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23068 
// VK_FUCHSIA_buffer_collection, final part:
//  * Device::setBufferCollectionBufferConstraintsFUCHSIA — applies buffer constraints to
//    a collection (tail of the raw-pointer form, then the enhanced form returning
//    ResultValueType<void>).
//  * Device::destroyBufferCollectionFUCHSIA and the Device::destroy overloads for
//    BufferCollectionFUCHSIA — both call vkDestroyBufferCollectionFUCHSIA; the Optional
//    allocator overloads convert to a C allocation-callbacks pointer (null when absent).
//  * Device::getBufferCollectionPropertiesFUCHSIA's signature starts at the end of this
//    span; its body continues past this view and is left untouched.
// NOTE(review): extraction artifacts left byte-identical; comments only.
return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( 23069 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); 23070 } 23071 23072 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23073 template <typename Dispatch> 23074 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,Dispatch const & d) const23075 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23076 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, 23077 Dispatch const & d ) const 23078 { 23079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23080 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23081 VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA && 23082 "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23083 # endif 23084 23085 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( 23086 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) ); 23087 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); 23088 23089 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 23090 } 23091 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23092 23093 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch 
const & d) const23094 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23095 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23096 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23097 { 23098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23099 d.vkDestroyBufferCollectionFUCHSIA( 23100 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23101 } 23102 23103 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23104 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23105 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23106 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23107 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23108 { 23109 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23110 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23111 VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23112 # endif 23113 23114 d.vkDestroyBufferCollectionFUCHSIA( 23115 m_device, 23116 static_cast<VkBufferCollectionFUCHSIA>( collection ), 23117 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23118 } 23119 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23120 23121 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23122 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23123 const 
// The Device::destroy overloads are the generic-destroy aliases for
// destroyBufferCollectionFUCHSIA — identical bodies, selected by argument type.
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23124 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23125 { 23126 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23127 d.vkDestroyBufferCollectionFUCHSIA( 23128 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23129 } 23130 23131 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23132 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23133 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23134 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23136 { 23137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23139 VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23140 # endif 23141 23142 d.vkDestroyBufferCollectionFUCHSIA( 23143 m_device, 23144 static_cast<VkBufferCollectionFUCHSIA>( collection ), 23145 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23146 } 23147 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23148 23149 template <typename Dispatch> 23150 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,Dispatch const & d) const23151 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 23152 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, 23153 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
23154 { 23155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23156 return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( 23157 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); 23158 } 23159 23160 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23161 template <typename Dispatch> 23162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Dispatch const & d) const23163 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const 23164 { 23165 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23166 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23167 VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA && 23168 "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); 23169 # endif 23170 23171 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; 23172 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( 23173 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) ); 23174 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); 23175 23176 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 23177 } 23178 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23179 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 23180 23181 //=== VK_HUAWEI_subpass_shading === 23182 23183 template <typename Dispatch> getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass 
renderpass,VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,Dispatch const & d) const23184 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, 23185 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, 23186 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23187 { 23188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23189 return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 23190 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); 23191 } 23192 23193 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23194 template <typename Dispatch> 23195 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass,Dispatch const & d) const23196 Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const 23197 { 23198 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23199 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23200 VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && 23201 "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" ); 23202 # endif 23203 23204 VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; 23205 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 23206 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) ); 23207 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); 23208 23209 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); 23210 } 23211 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23212 23213 template <typename Dispatch> subpassShadingHUAWEI(Dispatch const & d) const23214 VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23215 { 23216 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23217 d.vkCmdSubpassShadingHUAWEI( m_commandBuffer ); 23218 } 23219 23220 //=== VK_HUAWEI_invocation_mask === 23221 23222 template <typename Dispatch> bindInvocationMaskHUAWEI(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const23223 VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, 23224 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 23225 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23226 { 23227 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23228 d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 23229 } 23230 23231 //=== VK_NV_external_memory_rdma === 23232 23233 template <typename Dispatch> 23234 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,Dispatch const & d) const23235 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, 23236 VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, 23237 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23238 { 23239 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23240 return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( 23241 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); 23242 } 23243 23244 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23245 template 
<typename Dispatch> 23246 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,Dispatch const & d) const23247 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const 23248 { 23249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23250 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23251 VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" ); 23252 # endif 23253 23254 VULKAN_HPP_NAMESPACE::RemoteAddressNV address; 23255 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryRemoteAddressNV( 23256 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) ); 23257 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); 23258 23259 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) ); 23260 } 23261 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23262 23263 //=== VK_EXT_pipeline_properties === 23264 23265 template <typename Dispatch> getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,Dispatch const & d) const23266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, 23267 VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, 23268 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23269 { 23270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23271 return static_cast<Result>( 
d.vkGetPipelinePropertiesEXT( 23272 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); 23273 } 23274 23275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23276 template <typename Dispatch> 23277 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo,Dispatch const & d) const23278 Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const 23279 { 23280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23282 VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" ); 23283 # endif 23284 23285 VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; 23286 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelinePropertiesEXT( 23287 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) ); 23288 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); 23289 23290 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) ); 23291 } 23292 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23293 23294 //=== VK_EXT_extended_dynamic_state2 === 23295 23296 template <typename Dispatch> setPatchControlPointsEXT(uint32_t patchControlPoints,Dispatch const & d) const23297 VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23298 { 23299 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23300 d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, 
patchControlPoints ); 23301 } 23302 23303 template <typename Dispatch> setRasterizerDiscardEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const23304 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 23305 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23306 { 23307 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23308 d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); 23309 } 23310 23311 template <typename Dispatch> setDepthBiasEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const23312 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23313 { 23314 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23315 d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 23316 } 23317 23318 template <typename Dispatch> setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp,Dispatch const & d) const23319 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23320 { 23321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23322 d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) ); 23323 } 23324 23325 template <typename Dispatch> setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const23326 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 23327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23328 { 23329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23330 d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 23331 } 
23332 23333 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 23334 //=== VK_QNX_screen_surface === 23335 23336 template <typename Dispatch> createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const23337 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, 23338 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23339 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 23340 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23341 { 23342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23343 return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, 23344 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), 23345 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 23346 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 23347 } 23348 23349 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23350 template <typename Dispatch> 23351 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23352 Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 23353 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23354 Dispatch const & d ) const 23355 { 23356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23357 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23358 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 23359 # endif 23360 23361 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23362 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 23363 m_instance, 23364 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 23365 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23366 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23367 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); 23368 23369 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); 23370 } 23371 23372 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23373 template <typename Dispatch> 23374 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createScreenSurfaceQNXUnique(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23375 Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 23376 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23377 Dispatch const & d ) const 23378 { 23379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23380 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23381 VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); 23382 # endif 23383 23384 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 23385 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 23386 m_instance, 23387 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 23388 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23389 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 23390 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); 23391 23392 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23393 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 23394 } 23395 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23396 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23397 23398 template <typename Dispatch> getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window * window,Dispatch const & d) const23399 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, 23400 struct _screen_window * window, 23401 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23402 { 23403 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23404 return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); 23405 } 23406 23407 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23408 template <typename Dispatch> 23409 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window & window,Dispatch const & d) const23410 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23411 { 23412 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23413 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23414 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && 23415 "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" ); 23416 # endif 23417 23418 VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); 23419 23420 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 23421 } 23422 # endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23423 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 23424 23425 //=== VK_EXT_color_write_enable === 23426 23427 template <typename Dispatch> setColorWriteEnableEXT(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,Dispatch const & d) const23428 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, 23429 const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, 23430 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23431 { 23432 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23433 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); 23434 } 23435 23436 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23437 template <typename Dispatch> setColorWriteEnableEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,Dispatch const & d) const23438 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, 23439 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23440 { 23441 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23442 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23443 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" ); 23444 # endif 23445 23446 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) ); 23447 } 23448 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23449 23450 //=== VK_KHR_ray_tracing_maintenance1 === 23451 23452 template <typename Dispatch> traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const23453 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress 
indirectDeviceAddress, 23454 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23455 { 23456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23457 d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 23458 } 23459 23460 //=== VK_EXT_multi_draw === 23461 23462 template <typename Dispatch> drawMultiEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,Dispatch const & d) const23463 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, 23464 const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, 23465 uint32_t instanceCount, 23466 uint32_t firstInstance, 23467 uint32_t stride, 23468 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23469 { 23470 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23471 d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride ); 23472 } 23473 23474 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23475 template <typename Dispatch> drawMultiEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,uint32_t instanceCount,uint32_t firstInstance,Dispatch const & d) const23476 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, 23477 uint32_t instanceCount, 23478 uint32_t firstInstance, 23479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23480 { 23481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23482 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23483 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" ); 23484 # endif 23485 23486 d.vkCmdDrawMultiEXT( m_commandBuffer, 23487 vertexInfo.size(), 23488 reinterpret_cast<const VkMultiDrawInfoEXT *>( 
vertexInfo.data() ), 23489 instanceCount, 23490 firstInstance, 23491 vertexInfo.stride() ); 23492 } 23493 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23494 23495 template <typename Dispatch> drawMultiIndexedEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,const int32_t * pVertexOffset,Dispatch const & d) const23496 VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, 23497 const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, 23498 uint32_t instanceCount, 23499 uint32_t firstInstance, 23500 uint32_t stride, 23501 const int32_t * pVertexOffset, 23502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23503 { 23504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23505 d.vkCmdDrawMultiIndexedEXT( 23506 m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); 23507 } 23508 23509 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23510 template <typename Dispatch> 23511 VULKAN_HPP_INLINE void drawMultiIndexedEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,uint32_t instanceCount,uint32_t firstInstance,Optional<const int32_t> vertexOffset,Dispatch const & d) const23512 CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, 23513 uint32_t instanceCount, 23514 uint32_t firstInstance, 23515 Optional<const int32_t> vertexOffset, 23516 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23517 { 23518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23519 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23520 VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" ); 23521 # endif 23522 23523 
d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, 23524 indexInfo.size(), 23525 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), 23526 instanceCount, 23527 firstInstance, 23528 indexInfo.stride(), 23529 static_cast<const int32_t *>( vertexOffset ) ); 23530 } 23531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23532 23533 //=== VK_EXT_opacity_micromap === 23534 23535 template <typename Dispatch> createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,Dispatch const & d) const23536 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, 23537 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23538 VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, 23539 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23540 { 23541 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23542 return static_cast<Result>( d.vkCreateMicromapEXT( m_device, 23543 reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), 23544 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 23545 reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) ); 23546 } 23547 23548 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23549 template <typename Dispatch> 23550 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23551 Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 23552 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23553 Dispatch const & d ) const 23554 { 23555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23556 # if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23557 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23558 # endif 23559 23560 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 23561 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23562 d.vkCreateMicromapEXT( m_device, 23563 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 23564 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23565 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 23566 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); 23567 23568 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); 23569 } 23570 23571 # ifndef VULKAN_HPP_NO_SMART_HANDLE 23572 template <typename Dispatch> 23573 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23574 Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 23575 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23576 Dispatch const & d ) const 23577 { 23578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23579 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23580 VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23581 # endif 23582 23583 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 23584 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23585 d.vkCreateMicromapEXT( m_device, 23586 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 23587 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23588 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 23589 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); 23590 23591 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 23592 result, UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 23593 } 23594 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23595 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23596 23597 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23598 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 23599 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23600 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23601 { 23602 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23603 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23604 } 23605 23606 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23607 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23608 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 23609 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23610 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23611 { 23612 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23613 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23614 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" 
); 23615 # endif 23616 23617 d.vkDestroyMicromapEXT( m_device, 23618 static_cast<VkMicromapEXT>( micromap ), 23619 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23620 } 23621 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23622 23623 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23624 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 23625 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23627 { 23628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23629 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23630 } 23631 23632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23633 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23634 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 23635 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23636 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23637 { 23638 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23639 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23640 VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23641 # endif 23642 23643 d.vkDestroyMicromapEXT( m_device, 23644 static_cast<VkMicromapEXT>( micromap ), 23645 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23646 } 23647 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23648 23649 template <typename Dispatch> 
buildMicromapsEXT(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const23650 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, 23651 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 23652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23653 { 23654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23655 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); 23656 } 23657 23658 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23659 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const23660 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 23661 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23662 { 23663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23664 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23665 VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 23666 # endif 23667 23668 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ); 23669 } 23670 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23671 23672 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const23673 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23674 uint32_t infoCount, 23675 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 23676 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23677 { 23678 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23679 return static_cast<Result>( d.vkBuildMicromapsEXT( 23680 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); 23681 } 23682 23683 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23684 template <typename Dispatch> 23685 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const23686 Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23687 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 23688 Dispatch const & d ) const 23689 { 23690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23691 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23692 VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); 23693 # endif 23694 23695 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBuildMicromapsEXT( 23696 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) ); 23697 VULKAN_HPP_NAMESPACE::detail::resultCheck( 23698 result, 23699 VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", 23700 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 23701 23702 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 23703 } 23704 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23705 23706 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * 
pInfo,Dispatch const & d) const23707 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23708 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, 23709 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23710 { 23711 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23712 return static_cast<Result>( 23713 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); 23714 } 23715 23716 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23717 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const23718 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23719 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, 23720 Dispatch const & d ) const 23721 { 23722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23723 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23724 VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23725 # endif 23726 23727 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23728 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) ); 23729 VULKAN_HPP_NAMESPACE::detail::resultCheck( 23730 result, 23731 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", 23732 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 23733 23734 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 23735 } 23736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ 23737 23738 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const23739 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23740 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 23741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23742 { 23743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23744 return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( 23745 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); 23746 } 23747 23748 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23749 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const23750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( 23751 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const 23752 { 23753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23755 VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); 23756 # endif 23757 23758 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMicromapToMemoryEXT( 23759 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) ); 23760 VULKAN_HPP_NAMESPACE::detail::resultCheck( 23761 result, 23762 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", 23763 
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 23764 23765 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 23766 } 23767 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23768 23769 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const23770 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 23771 const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 23772 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23773 { 23774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23775 return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( 23776 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); 23777 } 23778 23779 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23780 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) const23781 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( 23782 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const 23783 { 23784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23785 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23786 VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23787 # endif 23788 23789 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToMicromapEXT( 
23790 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) ); 23791 VULKAN_HPP_NAMESPACE::detail::resultCheck( 23792 result, 23793 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", 23794 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 23795 23796 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 23797 } 23798 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23799 23800 template <typename Dispatch> writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const23801 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, 23802 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 23803 VULKAN_HPP_NAMESPACE::QueryType queryType, 23804 size_t dataSize, 23805 void * pData, 23806 size_t stride, 23807 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23808 { 23809 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23810 return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( 23811 m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) ); 23812 } 23813 23814 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23815 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 23816 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const23817 Device::writeMicromapsPropertiesEXT( 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 23818 VULKAN_HPP_NAMESPACE::QueryType queryType, 23819 size_t dataSize, 23820 size_t stride, 23821 Dispatch const & d ) const 23822 { 23823 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23824 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23825 VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 23826 # endif 23827 23828 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 23829 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 23830 VULKAN_HPP_NAMESPACE::Result result = 23831 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 23832 micromaps.size(), 23833 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 23834 static_cast<VkQueryType>( queryType ), 23835 data.size() * sizeof( DataType ), 23836 reinterpret_cast<void *>( data.data() ), 23837 stride ) ); 23838 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); 23839 23840 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23841 } 23842 23843 template <typename DataType, typename Dispatch> 23844 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type writeMicromapsPropertyEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const23845 Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 23846 VULKAN_HPP_NAMESPACE::QueryType queryType, 23847 size_t stride, 23848 Dispatch const & d ) const 23849 { 23850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23851 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 
23852 VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 23853 # endif 23854 23855 DataType data; 23856 VULKAN_HPP_NAMESPACE::Result result = 23857 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 23858 micromaps.size(), 23859 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 23860 static_cast<VkQueryType>( queryType ), 23861 sizeof( DataType ), 23862 reinterpret_cast<void *>( &data ), 23863 stride ) ); 23864 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); 23865 23866 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 23867 } 23868 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23869 23870 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const23871 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23872 { 23873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23874 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); 23875 } 23876 23877 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23878 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const23879 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23880 { 23881 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23882 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23883 VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23884 # endif 23885 23886 d.vkCmdCopyMicromapEXT( m_commandBuffer, 
reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ); 23887 } 23888 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23889 23890 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const23891 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 23892 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23893 { 23894 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23895 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); 23896 } 23897 23898 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23899 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const23900 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, 23901 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23902 { 23903 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23904 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23905 VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); 23906 # endif 23907 23908 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ); 23909 } 23910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23911 23912 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const23913 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 23914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23915 { 23916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23917 d.vkCmdCopyMemoryToMicromapEXT( 
m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); 23918 } 23919 23920 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23921 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) const23922 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, 23923 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23924 { 23925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23926 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23927 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); 23928 # endif 23929 23930 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ); 23931 } 23932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23933 23934 template <typename Dispatch> writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const23935 VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, 23936 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 23937 VULKAN_HPP_NAMESPACE::QueryType queryType, 23938 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 23939 uint32_t firstQuery, 23940 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23941 { 23942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23943 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, 23944 micromapCount, 23945 reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), 23946 static_cast<VkQueryType>( queryType ), 23947 static_cast<VkQueryPool>( queryPool ), 23948 firstQuery ); 23949 } 23950 23951 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23952 template <typename Dispatch> 23953 
VULKAN_HPP_INLINE void writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const23954 CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 23955 VULKAN_HPP_NAMESPACE::QueryType queryType, 23956 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 23957 uint32_t firstQuery, 23958 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23959 { 23960 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23961 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23962 VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); 23963 # endif 23964 23965 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, 23966 micromaps.size(), 23967 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 23968 static_cast<VkQueryType>( queryType ), 23969 static_cast<VkQueryPool>( queryPool ), 23970 firstQuery ); 23971 } 23972 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23973 23974 template <typename Dispatch> getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const23975 VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, 23976 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 23977 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23978 { 23979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23980 d.vkGetDeviceMicromapCompatibilityEXT( m_device, 23981 reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), 23982 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( 
pCompatibility ) ); 23983 } 23984 23985 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23986 template <typename Dispatch> 23987 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo,Dispatch const & d) const23988 Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23989 { 23990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23991 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 23992 VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" ); 23993 # endif 23994 23995 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 23996 d.vkGetDeviceMicromapCompatibilityEXT( m_device, 23997 reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), 23998 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 23999 24000 return compatibility; 24001 } 24002 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24003 24004 template <typename Dispatch> getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,Dispatch const & d) const24005 VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 24006 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, 24007 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, 24008 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24009 { 24010 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24011 d.vkGetMicromapBuildSizesEXT( m_device, 24012 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 24013 
reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), 24014 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) ); 24015 } 24016 24017 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24018 template <typename Dispatch> 24019 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,Dispatch const & d) const24020 Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 24021 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, 24022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24023 { 24024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24025 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24026 VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" ); 24027 # endif 24028 24029 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; 24030 d.vkGetMicromapBuildSizesEXT( m_device, 24031 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 24032 reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), 24033 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) ); 24034 24035 return sizeInfo; 24036 } 24037 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24038 24039 //=== VK_HUAWEI_cluster_culling_shader === 24040 24041 template <typename Dispatch> 24042 VULKAN_HPP_INLINE void drawClusterHUAWEI(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const24043 CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24044 { 24045 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24046 d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 24047 } 24048 24049 
template <typename Dispatch> drawClusterIndirectHUAWEI(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const24050 VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, 24051 VULKAN_HPP_NAMESPACE::DeviceSize offset, 24052 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24053 { 24054 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24055 d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 24056 } 24057 24058 //=== VK_EXT_pageable_device_local_memory === 24059 24060 template <typename Dispatch> setMemoryPriorityEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory,float priority,Dispatch const & d) const24061 VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24062 { 24063 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24064 d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority ); 24065 } 24066 24067 //=== VK_KHR_maintenance4 === 24068 24069 template <typename Dispatch> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const24070 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 24071 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 24072 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24073 { 24074 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24075 d.vkGetDeviceBufferMemoryRequirementsKHR( 24076 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 24077 } 24078 24079 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 24080 template <typename Dispatch> 24081 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const24082 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24083 { 24084 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24085 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24086 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && 24087 "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24088 # endif 24089 24090 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 24091 d.vkGetDeviceBufferMemoryRequirementsKHR( 24092 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24093 24094 return memoryRequirements; 24095 } 24096 24097 template <typename X, typename Y, typename... 
Z, typename Dispatch> 24098 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const24099 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24100 { 24101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24102 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24103 VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && 24104 "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24105 # endif 24106 24107 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 24108 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 24109 d.vkGetDeviceBufferMemoryRequirementsKHR( 24110 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24111 24112 return structureChain; 24113 } 24114 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24115 24116 template <typename Dispatch> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const24117 VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 24118 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 24119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24120 { 24121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24122 d.vkGetDeviceImageMemoryRequirementsKHR( 24123 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( 
pMemoryRequirements ) ); 24124 } 24125 24126 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24127 template <typename Dispatch> 24128 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const24129 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24130 { 24131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24132 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24133 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && 24134 "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24135 # endif 24136 24137 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 24138 d.vkGetDeviceImageMemoryRequirementsKHR( 24139 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24140 24141 return memoryRequirements; 24142 } 24143 24144 template <typename X, typename Y, typename... 
Z, typename Dispatch> 24145 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const24146 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24147 { 24148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24149 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24150 VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && 24151 "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24152 # endif 24153 24154 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 24155 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 24156 d.vkGetDeviceImageMemoryRequirementsKHR( 24157 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24158 24159 return structureChain; 24160 } 24161 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24162 24163 template <typename Dispatch> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const24164 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 24165 uint32_t * pSparseMemoryRequirementCount, 24166 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 24167 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24168 { 24169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24170 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 
m_device, 24171 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 24172 pSparseMemoryRequirementCount, 24173 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 24174 } 24175 24176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24177 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 24178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const24179 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 24180 { 24181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24182 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24183 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && 24184 "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24185 # endif 24186 24187 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 24188 uint32_t sparseMemoryRequirementCount; 24189 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 24190 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 24191 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 24192 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 24193 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 24194 &sparseMemoryRequirementCount, 24195 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 24196 24197 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 24198 if ( sparseMemoryRequirementCount < 
sparseMemoryRequirements.size() ) 24199 { 24200 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 24201 } 24202 return sparseMemoryRequirements; 24203 } 24204 24205 template <typename SparseImageMemoryRequirements2Allocator, 24206 typename Dispatch, 24207 typename std::enable_if< 24208 std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, 24209 int>::type> 24210 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const24211 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 24212 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 24213 Dispatch const & d ) const 24214 { 24215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24216 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24217 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && 24218 "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); 24219 # endif 24220 24221 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 24222 sparseImageMemoryRequirements2Allocator ); 24223 uint32_t sparseMemoryRequirementCount; 24224 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 24225 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 24226 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 24227 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 24228 reinterpret_cast<const 
VkDeviceImageMemoryRequirements *>( &info ), 24229 &sparseMemoryRequirementCount, 24230 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 24231 24232 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 24233 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 24234 { 24235 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 24236 } 24237 return sparseMemoryRequirements; 24238 } 24239 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24240 24241 //=== VK_VALVE_descriptor_set_host_mapping === 24242 24243 template <typename Dispatch> getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,Dispatch const & d) const24244 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, 24245 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, 24246 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24247 { 24248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24249 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, 24250 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), 24251 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) ); 24252 } 24253 24254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24255 template <typename Dispatch> 24256 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,Dispatch const & d) const24257 Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, 24258 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 24259 { 24260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24261 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24262 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE && 24263 "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" ); 24264 # endif 24265 24266 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; 24267 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, 24268 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), 24269 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) ); 24270 24271 return hostMapping; 24272 } 24273 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24274 24275 template <typename Dispatch> 24276 VULKAN_HPP_INLINE void getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,void ** ppData,Dispatch const & d) const24277 Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24278 { 24279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24280 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData ); 24281 } 24282 24283 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24284 template <typename Dispatch> getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,Dispatch const & d) const24285 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 24286 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24287 { 24288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24289 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24290 VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE && 24291 "Function <vkGetDescriptorSetHostMappingVALVE> 
requires <VK_VALVE_descriptor_set_host_mapping>" ); 24292 # endif 24293 24294 void * pData; 24295 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData ); 24296 24297 return pData; 24298 } 24299 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24300 24301 //=== VK_NV_copy_memory_indirect === 24302 24303 template <typename Dispatch> copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,Dispatch const & d) const24304 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24305 uint32_t copyCount, 24306 uint32_t stride, 24307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24308 { 24309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24310 d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); 24311 } 24312 24313 template <typename Dispatch> copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,Dispatch const & d) const24314 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24315 uint32_t copyCount, 24316 uint32_t stride, 24317 VULKAN_HPP_NAMESPACE::Image dstImage, 24318 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 24319 const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, 24320 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24321 { 24322 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24323 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 24324 static_cast<VkDeviceAddress>( copyBufferAddress ), 24325 copyCount, 24326 stride, 24327 static_cast<VkImage>( dstImage ), 24328 static_cast<VkImageLayout>( 
dstImageLayout ), 24329 reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) ); 24330 } 24331 24332 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24333 template <typename Dispatch> 24334 VULKAN_HPP_INLINE void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,Dispatch const & d) const24335 CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 24336 uint32_t stride, 24337 VULKAN_HPP_NAMESPACE::Image dstImage, 24338 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 24339 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, 24340 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24341 { 24342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24343 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24344 VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" ); 24345 # endif 24346 24347 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 24348 static_cast<VkDeviceAddress>( copyBufferAddress ), 24349 imageSubresources.size(), 24350 stride, 24351 static_cast<VkImage>( dstImage ), 24352 static_cast<VkImageLayout>( dstImageLayout ), 24353 reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) ); 24354 } 24355 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24356 24357 //=== VK_NV_memory_decompression === 24358 24359 template <typename Dispatch> decompressMemoryNV(uint32_t decompressRegionCount,const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,Dispatch const & d) const24360 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, 24361 
const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, 24362 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24363 { 24364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24365 d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); 24366 } 24367 24368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24369 template <typename Dispatch> 24370 VULKAN_HPP_INLINE void decompressMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,Dispatch const & d) const24371 CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, 24372 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24373 { 24374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24375 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24376 VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" ); 24377 # endif 24378 24379 d.vkCmdDecompressMemoryNV( 24380 m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) ); 24381 } 24382 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24383 24384 template <typename Dispatch> decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,uint32_t stride,Dispatch const & d) const24385 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, 24386 VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, 24387 uint32_t stride, 24388 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24389 { 24390 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 24391 d.vkCmdDecompressMemoryIndirectCountNV( 24392 m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride ); 24393 } 24394 24395 //=== VK_NV_device_generated_commands_compute === 24396 24397 template <typename Dispatch> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const24398 VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, 24399 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 24400 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24401 { 24402 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24403 d.vkGetPipelineIndirectMemoryRequirementsNV( 24404 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 24405 } 24406 24407 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24408 template <typename Dispatch> 24409 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const24410 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 24411 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24412 { 24413 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24414 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24415 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 24416 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 24417 # endif 24418 24419 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 24420 
d.vkGetPipelineIndirectMemoryRequirementsNV( 24421 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24422 24423 return memoryRequirements; 24424 } 24425 24426 template <typename X, typename Y, typename... Z, typename Dispatch> 24427 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const24428 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 24429 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24430 { 24431 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24433 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && 24434 "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); 24435 # endif 24436 24437 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 24438 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 24439 d.vkGetPipelineIndirectMemoryRequirementsNV( 24440 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 24441 24442 return structureChain; 24443 } 24444 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24445 24446 template <typename Dispatch> updatePipelineIndirectBufferNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const24447 VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 24448 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 24449 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 24450 { 24451 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24452 d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 24453 } 24454 24455 template <typename Dispatch> getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,Dispatch const & d) const24456 VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, 24457 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24458 { 24459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24460 return static_cast<DeviceAddress>( 24461 d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); 24462 } 24463 24464 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24465 template <typename Dispatch> 24466 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info,Dispatch const & d) const24467 Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24468 { 24469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24470 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24471 VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && 24472 "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" ); 24473 # endif 24474 24475 VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) ); 24476 24477 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 24478 } 24479 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24480 24481 //=== 
VK_EXT_extended_dynamic_state3 === 24482 24483 template <typename Dispatch> setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,Dispatch const & d) const24484 VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24485 { 24486 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24487 d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) ); 24488 } 24489 24490 template <typename Dispatch> setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,Dispatch const & d) const24491 VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24492 { 24493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24494 d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) ); 24495 } 24496 24497 template <typename Dispatch> setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,Dispatch const & d) const24498 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, 24499 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24500 { 24501 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24502 d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); 24503 } 24504 24505 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,Dispatch const & d) const24506 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 24507 const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, 24508 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24509 { 24510 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24511 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); 24512 } 24513 24514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24515 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,Dispatch const & d) const24516 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 24517 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, 24518 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 24519 { 24520 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24521 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24522 VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24523 # endif 24524 # ifdef VULKAN_HPP_NO_EXCEPTIONS 24525 VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 ); 24526 # else 24527 if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 ) 24528 { 24529 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" ); 24530 } 24531 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 24532 24533 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) ); 24534 } 24535 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24536 24537 template <typename Dispatch> setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,Dispatch const & d) const24538 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 
alphaToCoverageEnable, 24539 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24540 { 24541 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24542 d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) ); 24543 } 24544 24545 template <typename Dispatch> setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,Dispatch const & d) const24546 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24547 { 24548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24549 d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) ); 24550 } 24551 24552 template <typename Dispatch> setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,Dispatch const & d) const24553 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24554 { 24555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24556 d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) ); 24557 } 24558 24559 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,Dispatch const & d) const24560 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 24561 uint32_t attachmentCount, 24562 const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, 24563 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24564 { 24565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24566 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); 24567 } 24568 24569 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24570 template <typename Dispatch> 
setColorBlendEnableEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,Dispatch const & d) const24571 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 24572 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, 24573 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24574 { 24575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24576 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24577 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && 24578 "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24579 # endif 24580 24581 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) ); 24582 } 24583 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24584 24585 template <typename Dispatch> setColorBlendEquationEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,Dispatch const & d) const24586 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 24587 uint32_t attachmentCount, 24588 const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, 24589 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24590 { 24591 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24592 d.vkCmdSetColorBlendEquationEXT( 24593 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); 24594 } 24595 24596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24597 template <typename Dispatch> 24598 VULKAN_HPP_INLINE void setColorBlendEquationEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,Dispatch 
const & d) const24599 CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 24600 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, 24601 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24602 { 24603 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24604 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24605 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && 24606 "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24607 # endif 24608 24609 d.vkCmdSetColorBlendEquationEXT( 24610 m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) ); 24611 } 24612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24613 24614 template <typename Dispatch> setColorWriteMaskEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,Dispatch const & d) const24615 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 24616 uint32_t attachmentCount, 24617 const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, 24618 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24619 { 24620 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24621 d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); 24622 } 24623 24624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24625 template <typename Dispatch> 24626 VULKAN_HPP_INLINE void setColorWriteMaskEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,Dispatch const & d) const24627 CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 24628 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> 
const & colorWriteMasks, 24629 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24630 { 24631 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24632 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24633 VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && 24634 "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24635 # endif 24636 24637 d.vkCmdSetColorWriteMaskEXT( 24638 m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) ); 24639 } 24640 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24641 24642 template <typename Dispatch> setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,Dispatch const & d) const24643 VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, 24644 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24645 { 24646 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24647 d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); 24648 } 24649 24650 template <typename Dispatch> setRasterizationStreamEXT(uint32_t rasterizationStream,Dispatch const & d) const24651 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24652 { 24653 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24654 d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); 24655 } 24656 24657 template <typename Dispatch> 24658 VULKAN_HPP_INLINE void setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,Dispatch const & d) const24659 CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, 24660 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24661 { 24662 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24663 d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); 24664 } 24665 24666 template <typename Dispatch> setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize,Dispatch const & d) const24667 VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, 24668 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24669 { 24670 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24671 d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); 24672 } 24673 24674 template <typename Dispatch> setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,Dispatch const & d) const24675 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24676 { 24677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24678 d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) ); 24679 } 24680 24681 template <typename Dispatch> setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,Dispatch const & d) const24682 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, 24683 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24684 { 24685 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24686 d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) ); 24687 } 24688 24689 template <typename Dispatch> setColorBlendAdvancedEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,Dispatch 
const & d) const24690 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 24691 uint32_t attachmentCount, 24692 const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, 24693 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24694 { 24695 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24696 d.vkCmdSetColorBlendAdvancedEXT( 24697 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); 24698 } 24699 24700 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24701 template <typename Dispatch> 24702 VULKAN_HPP_INLINE void setColorBlendAdvancedEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,Dispatch const & d) const24703 CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 24704 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, 24705 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24706 { 24707 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24708 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24709 VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && 24710 "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24711 # endif 24712 24713 d.vkCmdSetColorBlendAdvancedEXT( 24714 m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) ); 24715 } 24716 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24717 24718 template <typename Dispatch> setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,Dispatch const & d) const24719 VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, 24720 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 24721 { 24722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24723 d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); 24724 } 24725 24726 template <typename Dispatch> setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,Dispatch const & d) const24727 VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, 24728 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24729 { 24730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24731 d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); 24732 } 24733 24734 template <typename Dispatch> setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,Dispatch const & d) const24735 VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24736 { 24737 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24738 d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) ); 24739 } 24740 24741 template <typename Dispatch> setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,Dispatch const & d) const24742 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, 24743 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24744 { 24745 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24746 d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) ); 24747 } 24748 24749 template <typename Dispatch> setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,Dispatch const & d) const24750 VULKAN_HPP_INLINE void 
CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, 24751 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24752 { 24753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24754 d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) ); 24755 } 24756 24757 template <typename Dispatch> setViewportSwizzleNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,Dispatch const & d) const24758 VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 24759 uint32_t viewportCount, 24760 const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, 24761 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24762 { 24763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24764 d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); 24765 } 24766 24767 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24768 template <typename Dispatch> 24769 VULKAN_HPP_INLINE void setViewportSwizzleNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,Dispatch const & d) const24770 CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 24771 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, 24772 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24773 { 24774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24775 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24776 VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV && 24777 "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24778 # endif 24779 24780 d.vkCmdSetViewportSwizzleNV( 24781 m_commandBuffer, firstViewport, viewportSwizzles.size(), 
reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) ); 24782 } 24783 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24784 24785 template <typename Dispatch> setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,Dispatch const & d) const24786 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, 24787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24788 { 24789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24790 d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) ); 24791 } 24792 24793 template <typename Dispatch> setCoverageToColorLocationNV(uint32_t coverageToColorLocation,Dispatch const & d) const24794 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24795 { 24796 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24797 d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation ); 24798 } 24799 24800 template <typename Dispatch> setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,Dispatch const & d) const24801 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, 24802 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24803 { 24804 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24805 d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); 24806 } 24807 24808 template <typename Dispatch> setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,Dispatch const & d) const24809 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, 24810 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24811 { 24812 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24813 d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) ); 24814 } 24815 24816 template <typename Dispatch> setCoverageModulationTableNV(uint32_t coverageModulationTableCount,const float * pCoverageModulationTable,Dispatch const & d) const24817 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, 24818 const float * pCoverageModulationTable, 24819 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24820 { 24821 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24822 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); 24823 } 24824 24825 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24826 template <typename Dispatch> setCoverageModulationTableNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,Dispatch const & d) const24827 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, 24828 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24829 { 24830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24831 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24832 VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && 24833 "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); 24834 # endif 24835 24836 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); 24837 } 24838 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24839 24840 template <typename Dispatch> setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,Dispatch const & d) const24841 VULKAN_HPP_INLINE void 
CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, 24842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24843 { 24844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24845 d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) ); 24846 } 24847 24848 template <typename Dispatch> setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,Dispatch const & d) const24849 VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, 24850 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24851 { 24852 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24853 d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) ); 24854 } 24855 24856 template <typename Dispatch> setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,Dispatch const & d) const24857 VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, 24858 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24859 { 24860 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24861 d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); 24862 } 24863 24864 //=== VK_EXT_shader_module_identifier === 24865 24866 template <typename Dispatch> getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const24867 VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 24868 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 24869 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
24870 { 24871 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24872 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 24873 } 24874 24875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24876 template <typename Dispatch> 24877 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Dispatch const & d) const24878 Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24879 { 24880 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24881 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24882 VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 24883 # endif 24884 24885 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 24886 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 24887 24888 return identifier; 24889 } 24890 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24891 24892 template <typename Dispatch> getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const24893 VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 24894 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 24895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24896 { 24897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24898 d.vkGetShaderModuleCreateInfoIdentifierEXT( 24899 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( 
pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 24900 } 24901 24902 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24903 template <typename Dispatch> 24904 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Dispatch const & d) const24905 Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 24906 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24907 { 24908 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24909 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24910 VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && 24911 "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); 24912 # endif 24913 24914 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 24915 d.vkGetShaderModuleCreateInfoIdentifierEXT( 24916 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 24917 24918 return identifier; 24919 } 24920 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 24921 24922 //=== VK_NV_optical_flow === 24923 24924 template <typename Dispatch> 24925 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,uint32_t * pFormatCount,VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,Dispatch const & d) const24926 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, 24927 uint32_t * pFormatCount, 24928 VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, 24929 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 24930 { 24931 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24932 return static_cast<Result>( 24933 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 24934 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), 24935 pFormatCount, 24936 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); 24937 } 24938 24939 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 24940 template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch> 24941 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 24942 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,Dispatch const & d) const24943 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 24944 Dispatch const & d ) const 24945 { 24946 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24947 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24948 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 24949 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 24950 # endif 24951 24952 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties; 24953 uint32_t formatCount; 24954 VULKAN_HPP_NAMESPACE::Result result; 24955 do 24956 { 24957 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 24958 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 24959 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 24960 { 24961 imageFormatProperties.resize( formatCount ); 24962 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 24963 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 24964 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 24965 &formatCount, 24966 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 24967 } 24968 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 24969 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 24970 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 24971 if ( formatCount < imageFormatProperties.size() ) 24972 { 24973 imageFormatProperties.resize( formatCount ); 24974 } 24975 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 24976 } 24977 24978 template <typename OpticalFlowImageFormatPropertiesNVAllocator, 24979 typename Dispatch, 24980 typename std::enable_if< 24981 std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, 24982 int>::type> 24983 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 24984 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,Dispatch const & d) const24985 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 24986 OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, 24987 Dispatch const & d ) const 24988 { 24989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 24990 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 24991 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && 24992 "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); 24993 # endif 24994 24995 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( 24996 opticalFlowImageFormatPropertiesNVAllocator ); 24997 uint32_t formatCount; 24998 VULKAN_HPP_NAMESPACE::Result result; 24999 do 25000 { 25001 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 25002 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 25003 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 25004 { 25005 imageFormatProperties.resize( formatCount ); 25006 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25007 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 25008 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 25009 &formatCount, 25010 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 25011 } 25012 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25013 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 25014 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 25015 if ( formatCount < imageFormatProperties.size() ) 25016 { 25017 imageFormatProperties.resize( formatCount ); 25018 } 25019 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); 25020 } 25021 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25022 25023 template <typename Dispatch> createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,Dispatch const & d) const25024 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, 25025 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25026 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, 25027 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25028 { 25029 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25030 return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, 25031 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), 25032 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 25033 reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); 25034 } 25035 25036 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25037 template <typename Dispatch> 25038 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25039 Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 25040 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25041 Dispatch const & d ) const 25042 { 25043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25044 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25045 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25046 # endif 25047 25048 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 25049 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 25050 m_device, 25051 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 
25052 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25053 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 25054 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); 25055 25056 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); 25057 } 25058 25059 # ifndef VULKAN_HPP_NO_SMART_HANDLE 25060 template <typename Dispatch> 25061 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25062 Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 25063 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25064 Dispatch const & d ) const 25065 { 25066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25068 VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25069 # endif 25070 25071 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 25072 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 25073 m_device, 25074 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 25075 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25076 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 25077 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); 25078 
25079 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 25080 result, UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 25081 } 25082 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 25083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25084 25085 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25086 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25087 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25089 { 25090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25091 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25092 } 25093 25094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25095 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25096 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25097 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25098 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25099 { 25100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25101 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25102 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25103 # endif 25104 25105 d.vkDestroyOpticalFlowSessionNV( 25106 m_device, 25107 static_cast<VkOpticalFlowSessionNV>( session ), 25108 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25109 } 25110 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25111 25112 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25113 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25114 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25115 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25116 { 25117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25118 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25119 } 25120 25121 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25122 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25123 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25124 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25125 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25126 { 25127 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25128 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25129 VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); 25130 # endif 25131 25132 d.vkDestroyOpticalFlowSessionNV( 25133 m_device, 25134 static_cast<VkOpticalFlowSessionNV>( session ), 25135 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25136 } 25137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25138 25139 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 25140 template <typename Dispatch> bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV 
session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const25141 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25142 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 25143 VULKAN_HPP_NAMESPACE::ImageView view, 25144 VULKAN_HPP_NAMESPACE::ImageLayout layout, 25145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25146 { 25147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25148 return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device, 25149 static_cast<VkOpticalFlowSessionNV>( session ), 25150 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 25151 static_cast<VkImageView>( view ), 25152 static_cast<VkImageLayout>( layout ) ) ); 25153 } 25154 #else 25155 template <typename Dispatch> 25156 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const25157 Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25158 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 25159 VULKAN_HPP_NAMESPACE::ImageView view, 25160 VULKAN_HPP_NAMESPACE::ImageLayout layout, 25161 Dispatch const & d ) const 25162 { 25163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25164 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25165 VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" ); 25166 # endif 25167 25168 VULKAN_HPP_NAMESPACE::Result result = 25169 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindOpticalFlowSessionImageNV( m_device, 25170 static_cast<VkOpticalFlowSessionNV>( session ), 25171 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 25172 static_cast<VkImageView>( view ), 25173 static_cast<VkImageLayout>( layout ) ) ); 25174 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); 25175 25176 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 25177 } 25178 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 25179 25180 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,Dispatch const & d) const25181 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25182 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, 25183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25184 { 25185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25186 d.vkCmdOpticalFlowExecuteNV( 25187 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); 25188 } 25189 25190 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25191 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,Dispatch const & d) const25192 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 25193 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, 25194 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25195 { 25196 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25197 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25198 VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function 
<vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" ); 25199 # endif 25200 25201 d.vkCmdOpticalFlowExecuteNV( 25202 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) ); 25203 } 25204 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25205 25206 //=== VK_KHR_maintenance5 === 25207 25208 template <typename Dispatch> bindIndexBuffer2KHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const25209 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 25210 VULKAN_HPP_NAMESPACE::DeviceSize offset, 25211 VULKAN_HPP_NAMESPACE::DeviceSize size, 25212 VULKAN_HPP_NAMESPACE::IndexType indexType, 25213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25214 { 25215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25216 d.vkCmdBindIndexBuffer2KHR( m_commandBuffer, 25217 static_cast<VkBuffer>( buffer ), 25218 static_cast<VkDeviceSize>( offset ), 25219 static_cast<VkDeviceSize>( size ), 25220 static_cast<VkIndexType>( indexType ) ); 25221 } 25222 25223 template <typename Dispatch> getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const25224 VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, 25225 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 25226 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25227 { 25228 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25229 d.vkGetRenderingAreaGranularityKHR( 25230 m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 25231 } 25232 25233 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25234 
template <typename Dispatch> 25235 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo,Dispatch const & d) const25236 Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25237 { 25238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25239 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25240 VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" ); 25241 # endif 25242 25243 VULKAN_HPP_NAMESPACE::Extent2D granularity; 25244 d.vkGetRenderingAreaGranularityKHR( 25245 m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 25246 25247 return granularity; 25248 } 25249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25250 25251 template <typename Dispatch> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const25252 VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, 25253 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 25254 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25255 { 25256 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25257 d.vkGetDeviceImageSubresourceLayoutKHR( 25258 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 25259 } 25260 25261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25262 template <typename Dispatch> 25263 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR getImageSubresourceLayoutKHR(const 
VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,Dispatch const & d) const25264 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25265 { 25266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25267 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25268 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" ); 25269 # endif 25270 25271 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 25272 d.vkGetDeviceImageSubresourceLayoutKHR( 25273 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 25274 25275 return layout; 25276 } 25277 25278 template <typename X, typename Y, typename... Z, typename Dispatch> 25279 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,Dispatch const & d) const25280 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25281 { 25282 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25283 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25284 VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" ); 25285 # endif 25286 25287 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 25288 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 25289 d.vkGetDeviceImageSubresourceLayoutKHR( 25290 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 25291 25292 return 
structureChain; 25293 } 25294 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25295 25296 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const25297 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, 25298 const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, 25299 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 25300 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25301 { 25302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25303 d.vkGetImageSubresourceLayout2KHR( m_device, 25304 static_cast<VkImage>( image ), 25305 reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), 25306 reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 25307 } 25308 25309 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25310 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const25311 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR( 25312 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25313 { 25314 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25315 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25316 VULKAN_HPP_ASSERT( 25317 d.vkGetImageSubresourceLayout2KHR && 25318 "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" ); 25319 # endif 25320 25321 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 25322 d.vkGetImageSubresourceLayout2KHR( m_device, 25323 static_cast<VkImage>( image ), 25324 reinterpret_cast<const 
VkImageSubresource2KHR *>( &subresource ), 25325 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 25326 25327 return layout; 25328 } 25329 25330 template <typename X, typename Y, typename... Z, typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const25331 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR( 25332 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25333 { 25334 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25335 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25336 VULKAN_HPP_ASSERT( 25337 d.vkGetImageSubresourceLayout2KHR && 25338 "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" ); 25339 # endif 25340 25341 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 25342 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 25343 d.vkGetImageSubresourceLayout2KHR( m_device, 25344 static_cast<VkImage>( image ), 25345 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 25346 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 25347 25348 return structureChain; 25349 } 25350 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25351 25352 //=== VK_EXT_shader_object === 25353 25354 template <typename Dispatch> createShadersEXT(uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,Dispatch const & d) const25355 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, 25356 const 
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, 25357 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25358 VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, 25359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25360 { 25361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25362 return static_cast<Result>( d.vkCreateShadersEXT( m_device, 25363 createInfoCount, 25364 reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ), 25365 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 25366 reinterpret_cast<VkShaderEXT *>( pShaders ) ) ); 25367 } 25368 25369 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25370 template <typename ShaderEXTAllocator, typename Dispatch> 25371 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>> createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25372 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 25373 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25374 Dispatch const & d ) const 25375 { 25376 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25378 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25379 # endif 25380 25381 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() ); 25382 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25383 d.vkCreateShadersEXT( m_device, 25384 createInfos.size(), 25385 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 25386 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25387 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 25388 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25389 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", 25390 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 25391 25392 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); 25393 } 25394 25395 template <typename ShaderEXTAllocator, 25396 typename Dispatch, 25397 typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type> 25398 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>> createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const25399 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 25400 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25401 ShaderEXTAllocator & shaderEXTAllocator, 25402 Dispatch const & d ) const 25403 { 25404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25405 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25406 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25407 # endif 25408 25409 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator ); 25410 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25411 d.vkCreateShadersEXT( m_device, 25412 createInfos.size(), 25413 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 25414 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25415 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 25416 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25417 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", 25418 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 25419 25420 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); 25421 } 25422 25423 template <typename Dispatch> 25424 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT> createShaderEXT(const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25425 Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 25426 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25427 Dispatch const & d ) const 25428 { 25429 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25430 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25431 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25432 # endif 25433 25434 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 25435 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25436 d.vkCreateShadersEXT( m_device, 25437 1, 25438 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 25439 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25440 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 25441 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25442 VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", 25443 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } 
); 25444 25445 return ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>( result, std::move( shader ) ); 25446 } 25447 25448 # ifndef VULKAN_HPP_NO_SMART_HANDLE 25449 template <typename Dispatch, typename ShaderEXTAllocator> 25450 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>> createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25451 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 25452 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25453 Dispatch const & d ) const 25454 { 25455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25456 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25457 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25458 # endif 25459 25460 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 25461 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25462 d.vkCreateShadersEXT( m_device, 25463 createInfos.size(), 25464 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 25465 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25466 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 25467 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25468 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", 25469 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 25470 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders; 25471 uniqueShaders.reserve( 
createInfos.size() ); 25472 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 25473 for ( auto const & shader : shaders ) 25474 { 25475 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 25476 } 25477 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); 25478 } 25479 25480 template < 25481 typename Dispatch, 25482 typename ShaderEXTAllocator, 25483 typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type> 25484 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>> createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const25485 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 25486 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25487 ShaderEXTAllocator & shaderEXTAllocator, 25488 Dispatch const & d ) const 25489 { 25490 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25491 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25492 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25493 # endif 25494 25495 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 25496 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25497 d.vkCreateShadersEXT( m_device, 25498 createInfos.size(), 25499 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 25500 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25501 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 25502 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25503 VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", 25504 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 25505 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); 25506 uniqueShaders.reserve( createInfos.size() ); 25507 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 25508 for ( auto const & shader : shaders ) 25509 { 25510 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 25511 } 25512 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); 25513 } 25514 25515 template <typename Dispatch> 25516 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>> createShaderEXTUnique(const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25517 Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 25518 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25519 Dispatch const & d ) const 25520 { 25521 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25522 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25523 VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); 25524 # endif 25525 25526 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 25527 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25528 d.vkCreateShadersEXT( m_device, 25529 1, 25530 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 25531 
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 25532 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 25533 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 25534 VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", 25535 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); 25536 25537 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>( 25538 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 25539 } 25540 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 25541 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25542 25543 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25544 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 25545 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25546 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25547 { 25548 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25549 d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25550 } 25551 25552 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25553 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25554 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 25555 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25556 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25557 { 25558 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25559 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25560 
VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); 25561 # endif 25562 25563 d.vkDestroyShaderEXT( m_device, 25564 static_cast<VkShaderEXT>( shader ), 25565 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25566 } 25567 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25568 25569 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const25570 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 25571 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 25572 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25573 { 25574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25575 d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 25576 } 25577 25578 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25579 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const25580 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 25581 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 25582 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25583 { 25584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25585 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25586 VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); 25587 # endif 25588 25589 d.vkDestroyShaderEXT( m_device, 25590 static_cast<VkShaderEXT>( shader ), 25591 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 25592 } 25593 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ 25594 25595 template <typename Dispatch> 25596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,size_t * pDataSize,void * pData,Dispatch const & d) const25597 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25598 { 25599 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25600 return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); 25601 } 25602 25603 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25604 template <typename Uint8_tAllocator, typename Dispatch> 25605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Dispatch const & d) const25606 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const 25607 { 25608 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25609 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25610 VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); 25611 # endif 25612 25613 std::vector<uint8_t, Uint8_tAllocator> data; 25614 size_t dataSize; 25615 VULKAN_HPP_NAMESPACE::Result result; 25616 do 25617 { 25618 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 25619 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 25620 { 25621 data.resize( dataSize ); 25622 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25623 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 25624 } 25625 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25626 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); 25627 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 25628 if ( dataSize < data.size() ) 25629 { 25630 data.resize( dataSize ); 25631 } 25632 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 25633 } 25634 25635 template <typename Uint8_tAllocator, 25636 typename Dispatch, 25637 typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type> 25638 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const25639 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 25640 { 25641 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25642 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25643 VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); 25644 # endif 25645 25646 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 25647 size_t dataSize; 25648 VULKAN_HPP_NAMESPACE::Result result; 25649 do 25650 { 25651 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 25652 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 25653 { 25654 data.resize( dataSize ); 25655 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25656 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 25657 } 25658 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25659 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getShaderBinaryDataEXT" ); 25660 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 25661 if ( dataSize < data.size() ) 25662 { 25663 data.resize( dataSize ); 25664 } 25665 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); 25666 } 25667 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25668 25669 template <typename Dispatch> bindShadersEXT(uint32_t stageCount,const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,Dispatch const & d) const25670 VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, 25671 const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, 25672 const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, 25673 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25674 { 25675 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25676 d.vkCmdBindShadersEXT( 25677 m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) ); 25678 } 25679 25680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25681 template <typename Dispatch> bindShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,Dispatch const & d) const25682 VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages, 25683 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders, 25684 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 25685 { 25686 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25687 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25688 VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" ); 25689 # endif 25690 # ifdef VULKAN_HPP_NO_EXCEPTIONS 
25691 VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); 25692 # else 25693 if ( stages.size() != shaders.size() ) 25694 { 25695 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); 25696 } 25697 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 25698 25699 d.vkCmdBindShadersEXT( m_commandBuffer, 25700 stages.size(), 25701 reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ), 25702 reinterpret_cast<const VkShaderEXT *>( shaders.data() ) ); 25703 } 25704 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25705 25706 //=== VK_QCOM_tile_properties === 25707 25708 template <typename Dispatch> getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,uint32_t * pPropertiesCount,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const25709 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 25710 uint32_t * pPropertiesCount, 25711 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 25712 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25713 { 25714 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25715 return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( 25716 m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 25717 } 25718 25719 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25720 template <typename TilePropertiesQCOMAllocator, typename Dispatch> 25721 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Dispatch const & d) const25722 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const 25723 { 25724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 25725 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25726 VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 25727 # endif 25728 25729 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties; 25730 uint32_t propertiesCount; 25731 VULKAN_HPP_NAMESPACE::Result result; 25732 do 25733 { 25734 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25735 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 25736 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 25737 { 25738 properties.resize( propertiesCount ); 25739 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 25740 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 25741 } 25742 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25743 25744 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 25745 if ( propertiesCount < properties.size() ) 25746 { 25747 properties.resize( propertiesCount ); 25748 } 25749 return properties; 25750 } 25751 25752 template <typename TilePropertiesQCOMAllocator, 25753 typename Dispatch, 25754 typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type> 25755 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,Dispatch const & d) const25756 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 25757 TilePropertiesQCOMAllocator & 
tilePropertiesQCOMAllocator, 25758 Dispatch const & d ) const 25759 { 25760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25761 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25762 VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 25763 # endif 25764 25765 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator ); 25766 uint32_t propertiesCount; 25767 VULKAN_HPP_NAMESPACE::Result result; 25768 do 25769 { 25770 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25771 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 25772 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 25773 { 25774 properties.resize( propertiesCount ); 25775 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 25776 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 25777 } 25778 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25779 25780 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 25781 if ( propertiesCount < properties.size() ) 25782 { 25783 properties.resize( propertiesCount ); 25784 } 25785 return properties; 25786 } 25787 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25788 25789 template <typename Dispatch> getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const25790 VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 25791 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 25792 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25793 { 25794 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25795 return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( 25796 m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 25797 } 25798 25799 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25800 template <typename Dispatch> 25801 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const25802 Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25803 { 25804 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25805 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25806 VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); 25807 # endif 25808 25809 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; 25810 d.vkGetDynamicRenderingTilePropertiesQCOM( 25811 m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) ); 25812 25813 return properties; 25814 } 25815 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25816 25817 //=== VK_NV_low_latency2 === 25818 25819 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,Dispatch const & d) const25820 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25821 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, 25822 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25823 { 25824 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25825 return 
static_cast<Result>( 25826 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); 25827 } 25828 25829 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25830 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,Dispatch const & d) const25831 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25832 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, 25833 Dispatch const & d ) const 25834 { 25835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25836 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25837 VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" ); 25838 # endif 25839 25840 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 25841 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) ); 25842 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); 25843 25844 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 25845 } 25846 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25847 25848 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,Dispatch const & d) const25849 VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25850 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, 25851 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25852 { 25853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25854 return static_cast<Result>( 25855 
d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); 25856 } 25857 25858 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25859 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo,Dispatch const & d) const25860 VULKAN_HPP_INLINE void Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25861 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, 25862 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25863 { 25864 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25865 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25866 VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" ); 25867 # endif 25868 25869 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ); 25870 } 25871 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25872 25873 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const25874 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25875 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, 25876 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25877 { 25878 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25879 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 25880 } 25881 25882 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25883 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,Dispatch const & d) const25884 
VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25885 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, 25886 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25887 { 25888 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25889 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25890 VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" ); 25891 # endif 25892 25893 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 25894 } 25895 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25896 25897 template <typename Dispatch> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const25898 VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 25899 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, 25900 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25901 { 25902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25903 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 25904 } 25905 25906 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25907 template <typename Dispatch> 25908 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const25909 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25910 { 25911 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25912 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25913 VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function 
<vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); 25914 # endif 25915 25916 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 25917 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 25918 25919 return latencyMarkerInfo; 25920 } 25921 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25922 25923 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,Dispatch const & d) const25924 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, 25925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25926 { 25927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25928 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); 25929 } 25930 25931 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25932 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,Dispatch const & d) const25933 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, 25934 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25935 { 25936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25937 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25938 VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" ); 25939 # endif 25940 25941 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); 25942 } 25943 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 25944 25945 //=== VK_KHR_cooperative_matrix === 25946 25947 template <typename Dispatch> getCooperativeMatrixPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties,Dispatch 
const & d) const25948 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( 25949 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 25950 { 25951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25952 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 25953 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); 25954 } 25955 25956 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 25957 template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch> 25958 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 25959 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(Dispatch const & d) const25960 PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const 25961 { 25962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 25963 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 25964 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 25965 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 25966 # endif 25967 25968 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; 25969 uint32_t propertyCount; 25970 VULKAN_HPP_NAMESPACE::Result result; 25971 do 25972 { 25973 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 25974 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 25975 { 25976 properties.resize( propertyCount ); 25977 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 25978 
m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 25979 } 25980 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 25981 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 25982 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 25983 if ( propertyCount < properties.size() ) 25984 { 25985 properties.resize( propertyCount ); 25986 } 25987 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 25988 } 25989 25990 template <typename CooperativeMatrixPropertiesKHRAllocator, 25991 typename Dispatch, 25992 typename std::enable_if< 25993 std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, 25994 int>::type> 25995 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 25996 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,Dispatch const & d) const25997 PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, 25998 Dispatch const & d ) const 25999 { 26000 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26001 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26002 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && 26003 "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); 26004 # endif 26005 26006 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( 26007 cooperativeMatrixPropertiesKHRAllocator ); 26008 uint32_t propertyCount; 26009 VULKAN_HPP_NAMESPACE::Result result; 
26010 do 26011 { 26012 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 26013 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 26014 { 26015 properties.resize( propertyCount ); 26016 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 26017 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 26018 } 26019 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26020 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 26021 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 26022 if ( propertyCount < properties.size() ) 26023 { 26024 properties.resize( propertyCount ); 26025 } 26026 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 26027 } 26028 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26029 26030 //=== VK_EXT_attachment_feedback_loop_dynamic_state === 26031 26032 template <typename Dispatch> setAttachmentFeedbackLoopEnableEXT(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,Dispatch const & d) const26033 VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, 26034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26035 { 26036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26037 d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) ); 26038 } 26039 26040 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 26041 //=== VK_QNX_external_memory_screen_buffer === 26042 26043 template <typename Dispatch> getScreenBufferPropertiesQNX(const struct _screen_buffer * buffer,VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,Dispatch const & d) 
const26044 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, 26045 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, 26046 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26047 { 26048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26049 return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); 26050 } 26051 26052 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26053 template <typename Dispatch> 26054 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const26055 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 26056 { 26057 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26059 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 26060 # endif 26061 26062 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; 26063 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26064 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 26065 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 26066 26067 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 26068 } 26069 26070 template <typename X, typename Y, typename... 
Z, typename Dispatch> 26071 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const26072 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 26073 { 26074 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26075 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26076 VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); 26077 # endif 26078 26079 StructureChain<X, Y, Z...> structureChain; 26080 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>(); 26081 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26082 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 26083 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 26084 26085 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); 26086 } 26087 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26088 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 26089 26090 //=== VK_KHR_line_rasterization === 26091 26092 template <typename Dispatch> 26093 VULKAN_HPP_INLINE void setLineStippleKHR(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const26094 CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26095 { 26096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26097 d.vkCmdSetLineStippleKHR( m_commandBuffer, lineStippleFactor, lineStipplePattern ); 26098 } 26099 26100 //=== 
VK_KHR_calibrated_timestamps === 26101 26102 template <typename Dispatch> getCalibrateableTimeDomainsKHR(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const26103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, 26104 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 26105 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26106 { 26107 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26108 return static_cast<Result>( 26109 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 26110 } 26111 26112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26113 template <typename TimeDomainKHRAllocator, typename Dispatch> 26114 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(Dispatch const & d) const26115 PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const 26116 { 26117 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26118 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26119 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 26120 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 26121 # endif 26122 26123 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 26124 uint32_t timeDomainCount; 26125 VULKAN_HPP_NAMESPACE::Result result; 26126 do 26127 { 26128 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 26129 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 26130 { 26131 timeDomains.resize( timeDomainCount ); 26132 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 26133 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 26134 } 26135 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26136 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 26137 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 26138 if ( timeDomainCount < timeDomains.size() ) 26139 { 26140 timeDomains.resize( timeDomainCount ); 26141 } 26142 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 26143 } 26144 26145 template <typename TimeDomainKHRAllocator, 26146 typename Dispatch, 26147 typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 26148 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const26149 PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 26150 { 26151 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26152 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26153 VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && 26154 "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 26155 # endif 26156 26157 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 26158 uint32_t timeDomainCount; 26159 VULKAN_HPP_NAMESPACE::Result result; 26160 do 26161 { 26162 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 26163 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 26164 { 26165 timeDomains.resize( timeDomainCount ); 26166 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26167 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 26168 } 26169 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 26170 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 26171 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 26172 if ( timeDomainCount < timeDomains.size() ) 26173 { 26174 timeDomains.resize( timeDomainCount ); 26175 } 26176 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); 26177 } 26178 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26179 26180 template <typename Dispatch> getCalibratedTimestampsKHR(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const26181 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, 26182 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 26183 uint64_t * pTimestamps, 26184 uint64_t * pMaxDeviation, 26185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26186 { 26187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26188 return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( 26189 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 26190 } 26191 26192 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26193 template <typename Uint64_tAllocator, typename Dispatch> 26194 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const26195 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 26196 Dispatch const & d ) const 26197 { 26198 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26199 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26200 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 26201 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 26202 # endif 26203 26204 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 26205 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 26206 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 26207 uint64_t & maxDeviation = data_.second; 26208 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 26209 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 26210 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 26211 26212 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 26213 } 26214 26215 template <typename Uint64_tAllocator, 26216 typename Dispatch, 26217 typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> 26218 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type 
getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const26219 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 26220 Uint64_tAllocator & uint64_tAllocator, 26221 Dispatch const & d ) const 26222 { 26223 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26224 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26225 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 26226 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 26227 # endif 26228 26229 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 26230 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 26231 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 26232 uint64_t & maxDeviation = data_.second; 26233 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 26234 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 26235 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 26236 26237 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 26238 } 26239 26240 template <typename Dispatch> 26241 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampKHR(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const26242 Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, 
Dispatch const & d ) const 26243 { 26244 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26245 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26246 VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && 26247 "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); 26248 # endif 26249 26250 std::pair<uint64_t, uint64_t> data_; 26251 uint64_t & timestamp = data_.first; 26252 uint64_t & maxDeviation = data_.second; 26253 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 26254 d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 26255 VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); 26256 26257 return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); 26258 } 26259 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26260 26261 //=== VK_KHR_maintenance6 === 26262 26263 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo,Dispatch const & d) const26264 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, 26265 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26266 { 26267 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26268 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) ); 26269 } 26270 26271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26272 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo,Dispatch const & d) const26273 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & 
bindDescriptorSetsInfo, 26274 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26275 { 26276 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26277 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26278 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6>" ); 26279 # endif 26280 26281 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) ); 26282 } 26283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26284 26285 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo,Dispatch const & d) const26286 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, 26287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26288 { 26289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26290 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) ); 26291 } 26292 26293 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26294 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo,Dispatch const & d) const26295 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, 26296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26297 { 26298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26300 VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6>" ); 26301 # endif 26302 26303 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) ); 26304 } 26305 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26306 26307 template <typename 
Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo,Dispatch const & d) const26308 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo, 26309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26310 { 26311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26312 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) ); 26313 } 26314 26315 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26316 template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo,Dispatch const & d) const26317 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo, 26318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26319 { 26320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26321 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26322 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6>" ); 26323 # endif 26324 26325 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) ); 26326 } 26327 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26328 26329 template <typename Dispatch> 26330 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo,Dispatch const & d) const26331 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo, 26332 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26333 { 26334 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26335 
d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, 26336 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) ); 26337 } 26338 26339 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26340 template <typename Dispatch> 26341 VULKAN_HPP_INLINE void pushDescriptorSetWithTemplate2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo,Dispatch const & d) const26342 CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo, 26343 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26344 { 26345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26346 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26347 VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && "Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6>" ); 26348 # endif 26349 26350 d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, 26351 reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) ); 26352 } 26353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26354 26355 template <typename Dispatch> 26356 VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo,Dispatch const & d) const26357 CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, 26358 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26359 { 26360 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26361 d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); 26362 } 26363 26364 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26365 template <typename Dispatch> 26366 
VULKAN_HPP_INLINE void setDescriptorBufferOffsets2EXT(const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo,Dispatch const & d) const26367 CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, 26368 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26369 { 26370 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26371 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26372 VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" ); 26373 # endif 26374 26375 d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) ); 26376 } 26377 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26378 26379 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const26380 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 26381 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, 26382 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26383 { 26384 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26385 d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 26386 m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); 26387 } 26388 26389 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 26390 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplers2EXT(const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo,Dispatch const & d) const26391 VULKAN_HPP_INLINE void 
CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( 26392 const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, 26393 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 26394 { 26395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 26396 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 26397 VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && 26398 "Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" ); 26399 # endif 26400 26401 d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( 26402 m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) ); 26403 } 26404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 26405 26406 } // namespace VULKAN_HPP_NAMESPACE 26407 #endif 26408