// Vulkan-Hpp: vulkan_funcs.hpp
// NOTE(review): this excerpt was scraped from the generated Doxygen page; the
// page navigation text has been converted into this comment so the file parses.
1 // Copyright 2015-2024 The Khronos Group Inc.
2 //
3 // SPDX-License-Identifier: Apache-2.0 OR MIT
4 //
5 
6 // This header is generated from the Khronos Vulkan XML API Registry.
7 
8 #ifndef VULKAN_FUNCS_HPP
9 #define VULKAN_FUNCS_HPP
10 
11 namespace VULKAN_HPP_NAMESPACE
12 {
13 
14  //===========================
15  //=== COMMAND Definitions ===
16  //===========================
17 
18  //=== VK_VERSION_1_0 ===
19 
  // Thin wrapper over vkCreateInstance: forwards pointer arguments straight to the
  // C entry point via the dispatcher `d` and returns the raw Result (no throwing).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo *  pCreateInfo,
                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                VULKAN_HPP_NAMESPACE::Instance *                  pInstance,
                                                                Dispatch const &                                  d ) VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the header this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // C++ wrapper structs are layout-compatible with their C counterparts, hence the reinterpret_casts.
    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkInstance *>( pInstance ) ) );
  }
31 
32 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
33  template <typename Dispatch>
35  const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
36  {
37  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
38 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
39  VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" );
40 # endif
41 
42  VULKAN_HPP_NAMESPACE::Instance instance;
44  d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
45  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
46  reinterpret_cast<VkInstance *>( &instance ) ) );
47  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
48 
49  return createResultValueType( result, instance );
50  }
51 
52 # ifndef VULKAN_HPP_NO_SMART_HANDLE
53  template <typename Dispatch>
55  const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
56  {
57  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
58 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
59  VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" );
60 # endif
61 
62  VULKAN_HPP_NAMESPACE::Instance instance;
64  d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
65  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
66  reinterpret_cast<VkInstance *>( &instance ) ) );
67  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
68 
69  return createResultValueType( result,
71  }
72 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
73 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
74 
  // Raw vkDestroyInstance wrapper; pAllocator may be nullptr for default allocation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
81 
82 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroy: Optional allocator converts to nullptr when absent.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function <vkDestroyInstance> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyInstance( m_instance,
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
94 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
95 
  // Raw wrapper: caller drives the standard Vulkan count/fill two-call protocol.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t *                             pPhysicalDeviceCount,
                                                                                    VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
                                                                                    Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
  }
104 
105 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
106  template <typename PhysicalDeviceAllocator, typename Dispatch>
108  Instance::enumeratePhysicalDevices( Dispatch const & d ) const
109  {
110  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
111 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
112  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" );
113 # endif
114 
115  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
116  uint32_t physicalDeviceCount;
118  do
119  {
120  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
121  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
122  {
123  physicalDevices.resize( physicalDeviceCount );
124  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
125  d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
126  }
127  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
128  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
129  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
130  if ( physicalDeviceCount < physicalDevices.size() )
131  {
132  physicalDevices.resize( physicalDeviceCount );
133  }
134  return createResultValueType( result, physicalDevices );
135  }
136 
137  template <typename PhysicalDeviceAllocator,
138  typename Dispatch,
139  typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type>
141  Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
142  {
143  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
144 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
145  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" );
146 # endif
147 
148  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
149  uint32_t physicalDeviceCount;
151  do
152  {
153  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
154  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
155  {
156  physicalDevices.resize( physicalDeviceCount );
157  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
158  d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
159  }
160  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
161  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
162  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
163  if ( physicalDeviceCount < physicalDevices.size() )
164  {
165  physicalDevices.resize( physicalDeviceCount );
166  }
167  return createResultValueType( result, physicalDevices );
168  }
169 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
170 
  // Raw vkGetPhysicalDeviceFeatures wrapper; fills the caller-provided struct.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
  }
177 
178 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode getFeatures: returns the features struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
    PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );

    return features;
  }
193 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
194 
  // Raw vkGetPhysicalDeviceFormatProperties wrapper for a single format.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format             format,
                                                              VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
                                                              Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
  }
203 
204 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode getFormatProperties: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
    PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );

    return formatProperties;
  }
219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
220 
221  template <typename Dispatch>
222  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
227  VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
228  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
229  {
230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
231  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
232  static_cast<VkFormat>( format ),
233  static_cast<VkImageType>( type ),
234  static_cast<VkImageTiling>( tiling ),
235  static_cast<VkImageUsageFlags>( usage ),
236  static_cast<VkImageCreateFlags>( flags ),
237  reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
238  }
239 
240 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
241  template <typename Dispatch>
243  PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
248  Dispatch const & d ) const
249  {
250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
251 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
252  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" );
253 # endif
254 
255  VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
257  d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
258  static_cast<VkFormat>( format ),
259  static_cast<VkImageType>( type ),
260  static_cast<VkImageTiling>( tiling ),
261  static_cast<VkImageUsageFlags>( usage ),
262  static_cast<VkImageCreateFlags>( flags ),
263  reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
264  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
265 
266  return createResultValueType( result, imageFormatProperties );
267  }
268 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
269 
  // Raw vkGetPhysicalDeviceProperties wrapper; fills the caller-provided struct.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
                                                        Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
  }
277 
278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode getProperties: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
    PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );

    return properties;
  }
293 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
294 
  // Raw wrapper: caller drives the count/fill two-call protocol for queue families.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t *                                   pQueueFamilyPropertyCount,
                                                                   VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
                                                                   Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyProperties(
      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  }
304 
305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced two-call query: count, resize, fetch. No eIncomplete loop is needed —
  // the underlying function returns void and the family count cannot grow mid-query.
  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
    PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
    uint32_t                                                                                 queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );

    // Trim in case the implementation reported fewer entries on the second call.
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }
329 
  // Allocator-taking overload of the enhanced queue-family query; identical protocol,
  // but the result vector is constructed with the caller-supplied allocator.
  template <
    typename QueueFamilyPropertiesAllocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
    PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
    uint32_t                                                                                 queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );

    // Trim in case the implementation reported fewer entries on the second call.
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }
356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
357 
  // Raw vkGetPhysicalDeviceMemoryProperties wrapper; fills the caller-provided struct.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
                                                              Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  }
365 
366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode getMemoryProperties: returns the struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
    PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" );
#  endif

    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );

    return memoryProperties;
  }
381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
382 
  // Raw vkGetInstanceProcAddr wrapper; returns nullptr for unknown/unavailable names.
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetInstanceProcAddr( m_instance, pName );
  }
389 
390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // std::string convenience overload of getProcAddr.
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" );
#  endif

    PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );

    return result;
  }
403 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
404 
  // Raw vkGetDeviceProcAddr wrapper; returns nullptr for unknown/unavailable names.
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeviceProcAddr( m_device, pName );
  }
411 
412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // std::string convenience overload of Device::getProcAddr.
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" );
#  endif

    PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );

    return result;
  }
425 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
426 
  // Raw vkCreateDevice wrapper: forwards pointers through the dispatcher, returns raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *    pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::Device *                    pDevice,
                                                                              Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
                                                  reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkDevice *>( pDevice ) ) );
  }
439 
440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
441  template <typename Dispatch>
443  const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
444  {
445  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
446 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
447  VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" );
448 # endif
449 
450  VULKAN_HPP_NAMESPACE::Device device;
452  d.vkCreateDevice( m_physicalDevice,
453  reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
454  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
455  reinterpret_cast<VkDevice *>( &device ) ) );
456  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
457 
458  return createResultValueType( result, device );
459  }
460 
461 # ifndef VULKAN_HPP_NO_SMART_HANDLE
462  template <typename Dispatch>
464  PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
466  Dispatch const & d ) const
467  {
468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
469 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
470  VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" );
471 # endif
472 
473  VULKAN_HPP_NAMESPACE::Device device;
475  d.vkCreateDevice( m_physicalDevice,
476  reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
477  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
478  reinterpret_cast<VkDevice *>( &device ) ) );
479  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
480 
482  }
483 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
484 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
485 
  // Raw vkDestroyDevice wrapper; pAllocator may be nullptr for default allocation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
492 
493 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroy: Optional allocator converts to nullptr when absent.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function <vkDestroyDevice> requires <VK_VERSION_1_0>" );
#  endif

    d.vkDestroyDevice( m_device,
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
505 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
506 
507  template <typename Dispatch>
509  uint32_t * pPropertyCount,
510  VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
511  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
512  {
513  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
514  return static_cast<Result>(
515  d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
516  }
517 
518 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
519  template <typename ExtensionPropertiesAllocator, typename Dispatch>
520  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
522  {
523  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
524 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
525  VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" );
526 # endif
527 
528  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
529  uint32_t propertyCount;
531  do
532  {
533  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
534  d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
535  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
536  {
537  properties.resize( propertyCount );
538  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties(
539  layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
540  }
541  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
542  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
543  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
544  if ( propertyCount < properties.size() )
545  {
546  properties.resize( propertyCount );
547  }
548  return createResultValueType( result, properties );
549  }
550 
551  template <
552  typename ExtensionPropertiesAllocator,
553  typename Dispatch,
554  typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type>
555  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
557  ExtensionPropertiesAllocator & extensionPropertiesAllocator,
558  Dispatch const & d )
559  {
560  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
561 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
562  VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" );
563 # endif
564 
565  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
566  uint32_t propertyCount;
568  do
569  {
570  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
571  d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
572  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
573  {
574  properties.resize( propertyCount );
575  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties(
576  layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
577  }
578  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
579  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
580  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
581  if ( propertyCount < properties.size() )
582  {
583  properties.resize( propertyCount );
584  }
585  return createResultValueType( result, properties );
586  }
587 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
588 
  // Raw wrapper: caller drives the count/fill two-call protocol; pLayerName may be nullptr.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char *                                pLayerName,
                                                                                                    uint32_t *                                  pPropertyCount,
                                                                                                    VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
                                                                                                    Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }
599 
600 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
601  template <typename ExtensionPropertiesAllocator, typename Dispatch>
603  PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
604  {
605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
606 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
607  VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
608 # endif
609 
610  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
611  uint32_t propertyCount;
613  do
614  {
615  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
616  d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
617  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
618  {
619  properties.resize( propertyCount );
620  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties(
621  m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
622  }
623  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
624  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
625  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
626  if ( propertyCount < properties.size() )
627  {
628  properties.resize( propertyCount );
629  }
630  return createResultValueType( result, properties );
631  }
632 
633  template <
634  typename ExtensionPropertiesAllocator,
635  typename Dispatch,
636  typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type>
638  PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
639  ExtensionPropertiesAllocator & extensionPropertiesAllocator,
640  Dispatch const & d ) const
641  {
642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
644  VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
645 # endif
646 
647  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
648  uint32_t propertyCount;
650  do
651  {
652  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
653  d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
654  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
655  {
656  properties.resize( propertyCount );
657  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties(
658  m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
659  }
660  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
661  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
662  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
663  if ( propertyCount < properties.size() )
664  {
665  properties.resize( propertyCount );
666  }
667  return createResultValueType( result, properties );
668  }
669 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
670 
671  template <typename Dispatch>
673  VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
674  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
675  {
676  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
677  return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
678  }
679 
680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
681  template <typename LayerPropertiesAllocator, typename Dispatch>
682  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
683  enumerateInstanceLayerProperties( Dispatch const & d )
684  {
685  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
686 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
687  VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
688 # endif
689 
690  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
691  uint32_t propertyCount;
693  do
694  {
695  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
696  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
697  {
698  properties.resize( propertyCount );
699  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
700  d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
701  }
702  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
703  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
704  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
705  if ( propertyCount < properties.size() )
706  {
707  properties.resize( propertyCount );
708  }
709  return createResultValueType( result, properties );
710  }
711 
712  template <typename LayerPropertiesAllocator,
713  typename Dispatch,
714  typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type>
715  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
716  enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
717  {
718  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
719 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
720  VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
721 # endif
722 
723  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
724  uint32_t propertyCount;
726  do
727  {
728  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
729  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
730  {
731  properties.resize( propertyCount );
732  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
733  d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
734  }
735  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
736  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
737  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
738  if ( propertyCount < properties.size() )
739  {
740  properties.resize( propertyCount );
741  }
742  return createResultValueType( result, properties );
743  }
744 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
745 
  // C-interface style overload: forwards directly to vkEnumerateDeviceLayerProperties;
  // pProperties may be nullptr to query the count only. Returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t *                              pPropertyCount,
                                                                                                VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                                                                Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }
754 
755 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
756  template <typename LayerPropertiesAllocator, typename Dispatch>
758  PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
759  {
760  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
761 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
762  VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
763 # endif
764 
765  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
766  uint32_t propertyCount;
768  do
769  {
770  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
771  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
772  {
773  properties.resize( propertyCount );
774  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
775  d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
776  }
777  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
778  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
779  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
780  if ( propertyCount < properties.size() )
781  {
782  properties.resize( propertyCount );
783  }
784  return createResultValueType( result, properties );
785  }
786 
787  template <typename LayerPropertiesAllocator,
788  typename Dispatch,
789  typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type>
791  PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
792  {
793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
795  VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
796 # endif
797 
798  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
799  uint32_t propertyCount;
801  do
802  {
803  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
804  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
805  {
806  properties.resize( propertyCount );
807  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
808  d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
809  }
810  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
811  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
812  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
813  if ( propertyCount < properties.size() )
814  {
815  properties.resize( propertyCount );
816  }
817  return createResultValueType( result, properties );
818  }
819 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
820 
  // C-interface style overload: fetches the queue handle for the given family/index
  // pair into *pQueue via vkGetDeviceQueue. Never fails, hence void return.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  }
828 
829 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the Queue handle by value instead of via an out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function <vkGetDeviceQueue> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );

    return queue;
  }
844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
845 
  // C-interface style overload: submits submitCount batches to the queue, optionally
  // signaling 'fence' on completion. Returns the raw Result without throwing.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t                                 submitCount,
                                                               const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
                                                               VULKAN_HPP_NAMESPACE::Fence              fence,
                                                               Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }
855 
856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
857  template <typename Dispatch>
859  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
860  {
861  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
862 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
863  VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" );
864 # endif
865 
867  d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) );
868  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
869 
870  return createResultValueType( result );
871  }
872 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
873 
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant: returns the Result of vkQueueWaitIdle without checking it.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  }
#else
  // Enhanced variant: checks the Result via resultCheck before returning.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueueWaitIdle( m_queue ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );

    return createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
896 
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant: returns the Result of vkDeviceWaitIdle without checking it.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  }
#else
  // Enhanced variant: checks the Result via resultCheck before returning.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeviceWaitIdle( m_device ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );

    return createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
919 
  // C-interface style overload: allocates device memory into *pMemory using the optional
  // host allocation callbacks; returns the raw Result without throwing.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *  pAllocateInfo,
                                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                        VULKAN_HPP_NAMESPACE::DeviceMemory *              pMemory,
                                                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAllocateMemory( m_device,
                                                    reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  }
932 
933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
934  template <typename Dispatch>
936  Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
938  Dispatch const & d ) const
939  {
940  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
941 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
942  VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" );
943 # endif
944 
945  VULKAN_HPP_NAMESPACE::DeviceMemory memory;
947  d.vkAllocateMemory( m_device,
948  reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
949  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
950  reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
951  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
952 
953  return createResultValueType( result, memory );
954  }
955 
956 # ifndef VULKAN_HPP_NO_SMART_HANDLE
957  template <typename Dispatch>
959  Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
961  Dispatch const & d ) const
962  {
963  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
964 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
965  VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" );
966 # endif
967 
968  VULKAN_HPP_NAMESPACE::DeviceMemory memory;
970  d.vkAllocateMemory( m_device,
971  reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
972  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
973  reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
974  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
975 
976  return createResultValueType( result,
978  }
979 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
980 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
981 
  // C-interface style overload: frees previously allocated device memory; pAllocator
  // must match the callbacks used at allocation time (nullptr for default).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
990 
991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
992  template <typename Dispatch>
993  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
995  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
996  {
997  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
998 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
999  VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" );
1000 # endif
1001 
1002  d.vkFreeMemory( m_device,
1003  static_cast<VkDeviceMemory>( memory ),
1004  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1005  }
1006 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1007 
  // Alias for freeMemory; the parenthesized name avoids clashes with a 'free' macro.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
1016 
1017 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced alias for freeMemory taking the callbacks as an Optional wrapper;
  // the parenthesized name avoids clashes with a 'free' macro.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory                        memory,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" );
# endif

    d.vkFreeMemory( m_device,
                    static_cast<VkDeviceMemory>( memory ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
1032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1033 
1034  template <typename Dispatch>
1035  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
1039  void ** ppData,
1040  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1041  {
1042  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1043  return static_cast<Result>( d.vkMapMemory( m_device,
1044  static_cast<VkDeviceMemory>( memory ),
1045  static_cast<VkDeviceSize>( offset ),
1046  static_cast<VkDeviceSize>( size ),
1047  static_cast<VkMemoryMapFlags>( flags ),
1048  ppData ) );
1049  }
1050 
1051 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1052  template <typename Dispatch>
1053  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
1057  Dispatch const & d ) const
1058  {
1059  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1060 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1061  VULKAN_HPP_ASSERT( d.vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" );
1062 # endif
1063 
1064  void * pData;
1065  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory( m_device,
1066  static_cast<VkDeviceMemory>( memory ),
1067  static_cast<VkDeviceSize>( offset ),
1068  static_cast<VkDeviceSize>( size ),
1069  static_cast<VkMemoryMapFlags>( flags ),
1070  &pData ) );
1071  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
1072 
1073  return createResultValueType( result, pData );
1074  }
1075 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1076 
  // Unmaps a memory object previously mapped with mapMemory. Never fails, hence void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  }
1083 
  // C-interface style overload: flushes host writes in the given mapped ranges to the
  // device. Returns the raw Result without throwing.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t                                       memoryRangeCount,
                                                                                 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                                                                 Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }
1092 
1093 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1094  template <typename Dispatch>
1096  Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
1097  Dispatch const & d ) const
1098  {
1099  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1100 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1101  VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" );
1102 # endif
1103 
1105  d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
1106  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
1107 
1108  return createResultValueType( result );
1109  }
1110 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1111 
  // C-interface style overload: invalidates host caches for the given mapped ranges so
  // device writes become visible to the host. Returns the raw Result without throwing.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t                                       memoryRangeCount,
                                                                                      const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                                                                      Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }
1121 
1122 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1123  template <typename Dispatch>
1125  Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
1126  Dispatch const & d ) const
1127  {
1128  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1129 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1130  VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" );
1131 # endif
1132 
1134  d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
1135  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
1136 
1137  return createResultValueType( result );
1138  }
1139 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1140 
  // C-interface style overload: writes the current commitment (in bytes) of the memory
  // object into *pCommittedMemoryInBytes. Never fails, hence void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
                                                      Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
  }
1149 
1150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the committed byte count by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                                                       Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );

    return committedMemoryInBytes;
  }
1165 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1166 
1167 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant (enhanced mode disabled): binds 'memory' at 'memoryOffset' to 'buffer'
  // and returns the unchecked Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
                                                                          VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                          VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                                                                          Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
1178 #else
1179  template <typename Dispatch>
1181  VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
1182  {
1183  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1184 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1185  VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" );
1186 # endif
1187 
1189  d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
1190  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
1191 
1192  return createResultValueType( result );
1193  }
1194 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1195 
1196 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw variant (enhanced mode disabled): binds 'memory' at 'memoryOffset' to 'image'
  // and returns the unchecked Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
                                                                         VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                         VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                                                                         Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
1207 #else
1208  template <typename Dispatch>
1210  VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
1211  {
1212  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1213 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1214  VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" );
1215 # endif
1216 
1218  d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
1219  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
1220 
1221  return createResultValueType( result );
1222  }
1223 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1224 
  // C-interface style overload: writes the buffer's memory requirements into
  // *pMemoryRequirements. Never fails, hence void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer               buffer,
                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                                              Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }
1233 
1234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
    Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );

    return memoryRequirements;
  }
1249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1250 
  // C-interface style overload: writes the image's memory requirements into
  // *pMemoryRequirements. Never fails, hence void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image                image,
                                                             VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                                             Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }
1259 
1260 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
    Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );

    return memoryRequirements;
  }
1275 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1276 
  // C-interface style overload: writes up to *pSparseMemoryRequirementCount entries into
  // pSparseMemoryRequirements (nullptr to query the count only). Never fails, hence void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image                          image,
                                                                   uint32_t *                                           pSparseMemoryRequirementCount,
                                                                   VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
                                                                   Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSparseMemoryRequirements( m_device,
                                          static_cast<VkImage>( image ),
                                          pSparseMemoryRequirementCount,
                                          reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
  }
1289 
1290 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns all sparse memory requirements in a std::vector using the
  // count-then-fetch pattern (no retry loop: the underlying call cannot fail).
  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
    Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements( m_device,
                                          static_cast<VkImage>( image ),
                                          &sparseMemoryRequirementCount,
                                          reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      // Shrink if the second call returned fewer entries than the first reported.
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
1316 
1317  template <typename SparseImageMemoryRequirementsAllocator,
1318  typename Dispatch,
1319  typename std::enable_if<
1320  std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
1321  int>::type>
1322  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
1323  Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
1324  SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
1325  Dispatch const & d ) const
1326  {
1327  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1328 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1329  VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" );
1330 # endif
1331 
1332  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
1333  sparseImageMemoryRequirementsAllocator );
1334  uint32_t sparseMemoryRequirementCount;
1335  d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
1336  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1337  d.vkGetImageSparseMemoryRequirements( m_device,
1338  static_cast<VkImage>( image ),
1339  &sparseMemoryRequirementCount,
1340  reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
1341 
1342  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
1343  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
1344  {
1345  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1346  }
1347  return sparseMemoryRequirements;
1348  }
1349 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1350 
1351  template <typename Dispatch>
1352  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
1357  uint32_t * pPropertyCount,
1358  VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
1359  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1360  {
1361  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1362  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1363  static_cast<VkFormat>( format ),
1364  static_cast<VkImageType>( type ),
1365  static_cast<VkSampleCountFlagBits>( samples ),
1366  static_cast<VkImageUsageFlags>( usage ),
1367  static_cast<VkImageTiling>( tiling ),
1368  pPropertyCount,
1369  reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
1370  }
1371 
1372 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1373  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
1374  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
1375  PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
1380  Dispatch const & d ) const
1381  {
1382  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1383 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1384  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties &&
1385  "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" );
1386 # endif
1387 
1388  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
1389  uint32_t propertyCount;
1390  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1391  static_cast<VkFormat>( format ),
1392  static_cast<VkImageType>( type ),
1393  static_cast<VkSampleCountFlagBits>( samples ),
1394  static_cast<VkImageUsageFlags>( usage ),
1395  static_cast<VkImageTiling>( tiling ),
1396  &propertyCount,
1397  nullptr );
1398  properties.resize( propertyCount );
1399  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1400  static_cast<VkFormat>( format ),
1401  static_cast<VkImageType>( type ),
1402  static_cast<VkSampleCountFlagBits>( samples ),
1403  static_cast<VkImageUsageFlags>( usage ),
1404  static_cast<VkImageTiling>( tiling ),
1405  &propertyCount,
1406  reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
1407 
1408  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
1409  if ( propertyCount < properties.size() )
1410  {
1411  properties.resize( propertyCount );
1412  }
1413  return properties;
1414  }
1415 
1416  template <
1417  typename SparseImageFormatPropertiesAllocator,
1418  typename Dispatch,
1419  typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
1420  int>::type>
1421  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
1422  PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
1427  SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
1428  Dispatch const & d ) const
1429  {
1430  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1431 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1432  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties &&
1433  "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" );
1434 # endif
1435 
1436  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
1437  uint32_t propertyCount;
1438  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1439  static_cast<VkFormat>( format ),
1440  static_cast<VkImageType>( type ),
1441  static_cast<VkSampleCountFlagBits>( samples ),
1442  static_cast<VkImageUsageFlags>( usage ),
1443  static_cast<VkImageTiling>( tiling ),
1444  &propertyCount,
1445  nullptr );
1446  properties.resize( propertyCount );
1447  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1448  static_cast<VkFormat>( format ),
1449  static_cast<VkImageType>( type ),
1450  static_cast<VkSampleCountFlagBits>( samples ),
1451  static_cast<VkImageUsageFlags>( usage ),
1452  static_cast<VkImageTiling>( tiling ),
1453  &propertyCount,
1454  reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
1455 
1456  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
1457  if ( propertyCount < properties.size() )
1458  {
1459  properties.resize( propertyCount );
1460  }
1461  return properties;
1462  }
1463 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1464 
1465  template <typename Dispatch>
1466  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount,
1467  const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
1468  VULKAN_HPP_NAMESPACE::Fence fence,
1469  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1470  {
1471  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1472  return static_cast<Result>(
1473  d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
1474  }
1475 
1476 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1477  template <typename Dispatch>
1479  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
1480  {
1481  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1482 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1483  VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" );
1484 # endif
1485 
1487  d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
1488  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
1489 
1490  return createResultValueType( result );
1491  }
1492 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1493 
1494  template <typename Dispatch>
1495  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
1496  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1497  VULKAN_HPP_NAMESPACE::Fence * pFence,
1498  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1499  {
1500  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1501  return static_cast<Result>( d.vkCreateFence( m_device,
1502  reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
1503  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1504  reinterpret_cast<VkFence *>( pFence ) ) );
1505  }
1506 
1507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1508  template <typename Dispatch>
1510  const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1511  {
1512  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1514  VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" );
1515 # endif
1516 
1517  VULKAN_HPP_NAMESPACE::Fence fence;
1519  d.vkCreateFence( m_device,
1520  reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
1521  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1522  reinterpret_cast<VkFence *>( &fence ) ) );
1523  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
1524 
1525  return createResultValueType( result, fence );
1526  }
1527 
1528 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1529  template <typename Dispatch>
1531  const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1532  {
1533  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1534 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1535  VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" );
1536 # endif
1537 
1538  VULKAN_HPP_NAMESPACE::Fence fence;
1540  d.vkCreateFence( m_device,
1541  reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
1542  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1543  reinterpret_cast<VkFence *>( &fence ) ) );
1544  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
1545 
1546  return createResultValueType( result,
1548  }
1549 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
1550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1551 
1552  template <typename Dispatch>
1553  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
1554  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1555  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1556  {
1557  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1558  d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1559  }
1560 
1561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1562  template <typename Dispatch>
1563  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
1565  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1566  {
1567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1568 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1569  VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" );
1570 # endif
1571 
1572  d.vkDestroyFence( m_device,
1573  static_cast<VkFence>( fence ),
1574  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1575  }
1576 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1577 
1578  template <typename Dispatch>
1579  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
1580  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1581  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1582  {
1583  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1584  d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1585  }
1586 
1587 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1588  template <typename Dispatch>
1589  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
1591  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1592  {
1593  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1594 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1595  VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" );
1596 # endif
1597 
1598  d.vkDestroyFence( m_device,
1599  static_cast<VkFence>( fence ),
1600  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1601  }
1602 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1603 
1604  template <typename Dispatch>
1605  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
1606  const VULKAN_HPP_NAMESPACE::Fence * pFences,
1607  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1608  {
1609  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1610  return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
1611  }
1612 
1613 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1614  template <typename Dispatch>
1616  Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
1617  {
1618  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1619 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1620  VULKAN_HPP_ASSERT( d.vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" );
1621 # endif
1622 
1624  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
1625  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
1626 
1627  return createResultValueType( result );
1628  }
1629 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1630 
1631 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1632  template <typename Dispatch>
1633  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1634  {
1635  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1636  return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
1637  }
1638 #else
1639  template <typename Dispatch>
1640  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
1641  {
1642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1644  VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" );
1645 # endif
1646 
1647  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
1648  resultCheck(
1649  result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
1650 
1651  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
1652  }
1653 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1654 
1655  template <typename Dispatch>
1656  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
1657  const VULKAN_HPP_NAMESPACE::Fence * pFences,
1659  uint64_t timeout,
1660  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1661  {
1662  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1663  return static_cast<Result>(
1664  d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
1665  }
1666 
1667 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1668  template <typename Dispatch>
1670  Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
1672  uint64_t timeout,
1673  Dispatch const & d ) const
1674  {
1675  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1676 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1677  VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" );
1678 # endif
1679 
1681  d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
1682  resultCheck(
1683  result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
1684 
1685  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
1686  }
1687 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1688 
1689  template <typename Dispatch>
1690  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
1691  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1692  VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
1693  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1694  {
1695  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1696  return static_cast<Result>( d.vkCreateSemaphore( m_device,
1697  reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
1698  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1699  reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
1700  }
1701 
1702 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1703  template <typename Dispatch>
1705  Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
1707  Dispatch const & d ) const
1708  {
1709  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1710 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1711  VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" );
1712 # endif
1713 
1714  VULKAN_HPP_NAMESPACE::Semaphore semaphore;
1716  d.vkCreateSemaphore( m_device,
1717  reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
1718  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1719  reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
1720  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
1721 
1722  return createResultValueType( result, semaphore );
1723  }
1724 
1725 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1726  template <typename Dispatch>
1728  Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
1730  Dispatch const & d ) const
1731  {
1732  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1734  VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" );
1735 # endif
1736 
1737  VULKAN_HPP_NAMESPACE::Semaphore semaphore;
1739  d.vkCreateSemaphore( m_device,
1740  reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
1741  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1742  reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
1743  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
1744 
1745  return createResultValueType(
1747  }
1748 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
1749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1750 
1751  template <typename Dispatch>
1752  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
1753  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1754  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1755  {
1756  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1757  d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1758  }
1759 
1760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1761  template <typename Dispatch>
1762  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
1764  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1765  {
1766  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1767 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1768  VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" );
1769 # endif
1770 
1771  d.vkDestroySemaphore( m_device,
1772  static_cast<VkSemaphore>( semaphore ),
1773  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1774  }
1775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1776 
1777  template <typename Dispatch>
1778  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
1779  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1780  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1781  {
1782  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1783  d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1784  }
1785 
1786 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1787  template <typename Dispatch>
1788  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
1790  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1791  {
1792  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1793 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1794  VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" );
1795 # endif
1796 
1797  d.vkDestroySemaphore( m_device,
1798  static_cast<VkSemaphore>( semaphore ),
1799  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1800  }
1801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1802 
1803  template <typename Dispatch>
1804  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
1805  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1806  VULKAN_HPP_NAMESPACE::Event * pEvent,
1807  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1808  {
1809  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1810  return static_cast<Result>( d.vkCreateEvent( m_device,
1811  reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
1812  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1813  reinterpret_cast<VkEvent *>( pEvent ) ) );
1814  }
1815 
1816 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1817  template <typename Dispatch>
1819  const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1820  {
1821  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1822 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1823  VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" );
1824 # endif
1825 
1826  VULKAN_HPP_NAMESPACE::Event event;
1828  d.vkCreateEvent( m_device,
1829  reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1830  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1831  reinterpret_cast<VkEvent *>( &event ) ) );
1832  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
1833 
1834  return createResultValueType( result, event );
1835  }
1836 
1837 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1838  template <typename Dispatch>
1840  const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1841  {
1842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1843 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1844  VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" );
1845 # endif
1846 
1847  VULKAN_HPP_NAMESPACE::Event event;
1849  d.vkCreateEvent( m_device,
1850  reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1851  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1852  reinterpret_cast<VkEvent *>( &event ) ) );
1853  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
1854 
1855  return createResultValueType( result,
1857  }
1858 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
1859 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1860 
1861  template <typename Dispatch>
1862  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
1863  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1864  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1865  {
1866  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1867  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1868  }
1869 
1870 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1871  template <typename Dispatch>
1872  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
1874  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1875  {
1876  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1877 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1878  VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" );
1879 # endif
1880 
1881  d.vkDestroyEvent( m_device,
1882  static_cast<VkEvent>( event ),
1883  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1884  }
1885 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1886 
1887  template <typename Dispatch>
1888  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
1889  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1890  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1891  {
1892  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1893  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1894  }
1895 
1896 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1897  template <typename Dispatch>
1898  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
1900  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1901  {
1902  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1903 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1904  VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" );
1905 # endif
1906 
1907  d.vkDestroyEvent( m_device,
1908  static_cast<VkEvent>( event ),
1909  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1910  }
1911 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1912 
1913 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1914  template <typename Dispatch>
1915  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1916  {
1917  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1918  return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
1919  }
1920 #else
1921  template <typename Dispatch>
1922  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
1923  {
1924  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1925 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1926  VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" );
1927 # endif
1928 
1929  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
1930  resultCheck(
1931  result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
1932 
1933  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
1934  }
1935 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1936 
1937 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1938  template <typename Dispatch>
1939  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1940  {
1941  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1942  return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
1943  }
1944 #else
1945  template <typename Dispatch>
1946  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
1947  Dispatch const & d ) const
1948  {
1949  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1950 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1951  VULKAN_HPP_ASSERT( d.vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" );
1952 # endif
1953 
1954  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
1955  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
1956 
1957  return createResultValueType( result );
1958  }
1959 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1960 
1961 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1962  template <typename Dispatch>
1963  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1964  {
1965  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1966  return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
1967  }
1968 #else
1969  template <typename Dispatch>
1970  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
1971  {
1972  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1973 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
1974  VULKAN_HPP_ASSERT( d.vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" );
1975 # endif
1976 
1977  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
1978  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
1979 
1980  return createResultValueType( result );
1981  }
1982 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1983 
1984  template <typename Dispatch>
1985  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
1986  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1987  VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
1988  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1989  {
1990  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1991  return static_cast<Result>( d.vkCreateQueryPool( m_device,
1992  reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
1993  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1994  reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
1995  }
1996 
1997 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1998  template <typename Dispatch>
2000  Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
2002  Dispatch const & d ) const
2003  {
2004  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2006  VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" );
2007 # endif
2008 
2009  VULKAN_HPP_NAMESPACE::QueryPool queryPool;
2011  d.vkCreateQueryPool( m_device,
2012  reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
2013  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2014  reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
2015  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
2016 
2017  return createResultValueType( result, queryPool );
2018  }
2019 
2020 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2021  template <typename Dispatch>
2023  Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
2025  Dispatch const & d ) const
2026  {
2027  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2028 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2029  VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" );
2030 # endif
2031 
2032  VULKAN_HPP_NAMESPACE::QueryPool queryPool;
2034  d.vkCreateQueryPool( m_device,
2035  reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
2036  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2037  reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
2038  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
2039 
2040  return createResultValueType(
2042  }
2043 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2044 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2045 
2046  template <typename Dispatch>
2047  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2048  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2049  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2050  {
2051  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2052  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2053  }
2054 
2055 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2056  template <typename Dispatch>
2057  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2059  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2060  {
2061  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2063  VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" );
2064 # endif
2065 
2066  d.vkDestroyQueryPool( m_device,
2067  static_cast<VkQueryPool>( queryPool ),
2068  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2069  }
2070 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2071 
2072  template <typename Dispatch>
2073  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2074  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2075  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2076  {
2077  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2078  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2079  }
2080 
2081 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2082  template <typename Dispatch>
2083  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2085  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2086  {
2087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2088 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2089  VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" );
2090 # endif
2091 
2092  d.vkDestroyQueryPool( m_device,
2093  static_cast<VkQueryPool>( queryPool ),
2094  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2095  }
2096 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2097 
2098  template <typename Dispatch>
2099  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2100  uint32_t firstQuery,
2101  uint32_t queryCount,
2102  size_t dataSize,
2103  void * pData,
2106  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2107  {
2108  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2109  return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
2110  static_cast<VkQueryPool>( queryPool ),
2111  firstQuery,
2112  queryCount,
2113  dataSize,
2114  pData,
2115  static_cast<VkDeviceSize>( stride ),
2116  static_cast<VkQueryResultFlags>( flags ) ) );
2117  }
2118 
2119 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2120  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
2122  Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2123  uint32_t firstQuery,
2124  uint32_t queryCount,
2125  size_t dataSize,
2128  Dispatch const & d ) const
2129  {
2130  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2131 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2132  VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
2133 # endif
2134 
2135  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
2136  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
2137  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device,
2138  static_cast<VkQueryPool>( queryPool ),
2139  firstQuery,
2140  queryCount,
2141  data.size() * sizeof( DataType ),
2142  reinterpret_cast<void *>( data.data() ),
2143  static_cast<VkDeviceSize>( stride ),
2144  static_cast<VkQueryResultFlags>( flags ) ) );
2145  resultCheck( result,
2146  VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
2147  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
2148 
2150  }
2151 
2152  template <typename DataType, typename Dispatch>
2153  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
2154  uint32_t firstQuery,
2155  uint32_t queryCount,
2158  Dispatch const & d ) const
2159  {
2160  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2161 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2162  VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
2163 # endif
2164 
2165  DataType data;
2166  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device,
2167  static_cast<VkQueryPool>( queryPool ),
2168  firstQuery,
2169  queryCount,
2170  sizeof( DataType ),
2171  reinterpret_cast<void *>( &data ),
2172  static_cast<VkDeviceSize>( stride ),
2173  static_cast<VkQueryResultFlags>( flags ) ) );
2174  resultCheck(
2175  result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
2176 
2177  return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
2178  }
2179 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2180 
2181  template <typename Dispatch>
2182  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
2183  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2184  VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
2185  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2186  {
2187  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2188  return static_cast<Result>( d.vkCreateBuffer( m_device,
2189  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
2190  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2191  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
2192  }
2193 
2194 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2195  template <typename Dispatch>
2197  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2198  {
2199  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2200 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2201  VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" );
2202 # endif
2203 
2204  VULKAN_HPP_NAMESPACE::Buffer buffer;
2206  d.vkCreateBuffer( m_device,
2207  reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
2208  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2209  reinterpret_cast<VkBuffer *>( &buffer ) ) );
2210  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
2211 
2212  return createResultValueType( result, buffer );
2213  }
2214 
2215 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2216  template <typename Dispatch>
2218  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2219  {
2220  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2221 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2222  VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" );
2223 # endif
2224 
2225  VULKAN_HPP_NAMESPACE::Buffer buffer;
2227  d.vkCreateBuffer( m_device,
2228  reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
2229  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2230  reinterpret_cast<VkBuffer *>( &buffer ) ) );
2231  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
2232 
2233  return createResultValueType( result,
2235  }
2236 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2237 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2238 
2239  template <typename Dispatch>
2240  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
2241  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2242  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2243  {
2244  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2245  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2246  }
2247 
2248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2249  template <typename Dispatch>
2250  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
2252  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2253  {
2254  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2255 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2256  VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" );
2257 # endif
2258 
2259  d.vkDestroyBuffer( m_device,
2260  static_cast<VkBuffer>( buffer ),
2261  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2262  }
2263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2264 
2265  template <typename Dispatch>
2266  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
2267  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2268  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2269  {
2270  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2271  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2272  }
2273 
2274 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2275  template <typename Dispatch>
2276  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
2278  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2279  {
2280  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2281 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2282  VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" );
2283 # endif
2284 
2285  d.vkDestroyBuffer( m_device,
2286  static_cast<VkBuffer>( buffer ),
2287  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2288  }
2289 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2290 
2291  template <typename Dispatch>
2292  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
2293  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2294  VULKAN_HPP_NAMESPACE::BufferView * pView,
2295  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2296  {
2297  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2298  return static_cast<Result>( d.vkCreateBufferView( m_device,
2299  reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
2300  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2301  reinterpret_cast<VkBufferView *>( pView ) ) );
2302  }
2303 
2304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2305  template <typename Dispatch>
2307  Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
2309  Dispatch const & d ) const
2310  {
2311  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2312 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2313  VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" );
2314 # endif
2315 
2316  VULKAN_HPP_NAMESPACE::BufferView view;
2318  d.vkCreateBufferView( m_device,
2319  reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2320  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2321  reinterpret_cast<VkBufferView *>( &view ) ) );
2322  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
2323 
2324  return createResultValueType( result, view );
2325  }
2326 
2327 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2328  template <typename Dispatch>
2330  Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
2332  Dispatch const & d ) const
2333  {
2334  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2335 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2336  VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" );
2337 # endif
2338 
2339  VULKAN_HPP_NAMESPACE::BufferView view;
2341  d.vkCreateBufferView( m_device,
2342  reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2343  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2344  reinterpret_cast<VkBufferView *>( &view ) ) );
2345  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
2346 
2347  return createResultValueType( result,
2349  }
2350 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2351 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2352 
2353  template <typename Dispatch>
2354  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
2355  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2356  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2357  {
2358  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2359  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2360  }
2361 
2362 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2363  template <typename Dispatch>
2364  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
2366  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2367  {
2368  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2369 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2370  VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" );
2371 # endif
2372 
2373  d.vkDestroyBufferView( m_device,
2374  static_cast<VkBufferView>( bufferView ),
2375  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2376  }
2377 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2378 
2379  template <typename Dispatch>
2380  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
2381  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2382  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2383  {
2384  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2385  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2386  }
2387 
2388 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2389  template <typename Dispatch>
2390  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
2392  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2393  {
2394  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2395 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2396  VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" );
2397 # endif
2398 
2399  d.vkDestroyBufferView( m_device,
2400  static_cast<VkBufferView>( bufferView ),
2401  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2402  }
2403 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2404 
2405  template <typename Dispatch>
2406  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
2407  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2408  VULKAN_HPP_NAMESPACE::Image * pImage,
2409  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2410  {
2411  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2412  return static_cast<Result>( d.vkCreateImage( m_device,
2413  reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
2414  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2415  reinterpret_cast<VkImage *>( pImage ) ) );
2416  }
2417 
2418 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2419  template <typename Dispatch>
2421  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2422  {
2423  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2424 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2425  VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" );
2426 # endif
2427 
2428  VULKAN_HPP_NAMESPACE::Image image;
2430  d.vkCreateImage( m_device,
2431  reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2432  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2433  reinterpret_cast<VkImage *>( &image ) ) );
2434  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
2435 
2436  return createResultValueType( result, image );
2437  }
2438 
2439 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2440  template <typename Dispatch>
2442  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2443  {
2444  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2445 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2446  VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" );
2447 # endif
2448 
2449  VULKAN_HPP_NAMESPACE::Image image;
2451  d.vkCreateImage( m_device,
2452  reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2453  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2454  reinterpret_cast<VkImage *>( &image ) ) );
2455  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
2456 
2457  return createResultValueType( result,
2459  }
2460 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2461 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2462 
2463  template <typename Dispatch>
2464  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
2465  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2466  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2467  {
2468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2469  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2470  }
2471 
2472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2473  template <typename Dispatch>
2474  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
2476  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2477  {
2478  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2479 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2480  VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" );
2481 # endif
2482 
2483  d.vkDestroyImage( m_device,
2484  static_cast<VkImage>( image ),
2485  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2486  }
2487 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2488 
2489  template <typename Dispatch>
2490  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
2491  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2492  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2493  {
2494  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2495  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2496  }
2497 
2498 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2499  template <typename Dispatch>
2500  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
2502  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2503  {
2504  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2505 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2506  VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" );
2507 # endif
2508 
2509  d.vkDestroyImage( m_device,
2510  static_cast<VkImage>( image ),
2511  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2512  }
2513 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2514 
2515  template <typename Dispatch>
2516  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
2517  const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
2518  VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
2519  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2520  {
2521  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2522  d.vkGetImageSubresourceLayout( m_device,
2523  static_cast<VkImage>( image ),
2524  reinterpret_cast<const VkImageSubresource *>( pSubresource ),
2525  reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
2526  }
2527 
2528 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2529  template <typename Dispatch>
2530  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout(
2531  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2532  {
2533  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2534 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2535  VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" );
2536 # endif
2537 
2538  VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
2539  d.vkGetImageSubresourceLayout( m_device,
2540  static_cast<VkImage>( image ),
2541  reinterpret_cast<const VkImageSubresource *>( &subresource ),
2542  reinterpret_cast<VkSubresourceLayout *>( &layout ) );
2543 
2544  return layout;
2545  }
2546 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2547 
2548  template <typename Dispatch>
2549  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
2550  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2551  VULKAN_HPP_NAMESPACE::ImageView * pView,
2552  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2553  {
2554  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2555  return static_cast<Result>( d.vkCreateImageView( m_device,
2556  reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
2557  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2558  reinterpret_cast<VkImageView *>( pView ) ) );
2559  }
2560 
2561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2562  template <typename Dispatch>
2564  Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
2566  Dispatch const & d ) const
2567  {
2568  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2570  VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
2571 # endif
2572 
2573  VULKAN_HPP_NAMESPACE::ImageView view;
2575  d.vkCreateImageView( m_device,
2576  reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2577  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2578  reinterpret_cast<VkImageView *>( &view ) ) );
2579  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
2580 
2581  return createResultValueType( result, view );
2582  }
2583 
2584 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2585  template <typename Dispatch>
2587  Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
2589  Dispatch const & d ) const
2590  {
2591  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2592 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2593  VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" );
2594 # endif
2595 
2596  VULKAN_HPP_NAMESPACE::ImageView view;
2598  d.vkCreateImageView( m_device,
2599  reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2600  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2601  reinterpret_cast<VkImageView *>( &view ) ) );
2602  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
2603 
2604  return createResultValueType( result,
2606  }
2607 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2608 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2609 
2610  template <typename Dispatch>
2611  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
2612  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2613  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2614  {
2615  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2616  d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2617  }
2618 
2619 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2620  template <typename Dispatch>
2621  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
2623  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2624  {
2625  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2626 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2627  VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
2628 # endif
2629 
2630  d.vkDestroyImageView( m_device,
2631  static_cast<VkImageView>( imageView ),
2632  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2633  }
2634 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2635 
2636  template <typename Dispatch>
2637  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
2638  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2639  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2640  {
2641  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2642  d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2643  }
2644 
2645 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2646  template <typename Dispatch>
2647  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
2649  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2650  {
2651  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2652 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2653  VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" );
2654 # endif
2655 
2656  d.vkDestroyImageView( m_device,
2657  static_cast<VkImageView>( imageView ),
2658  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2659  }
2660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2661 
2662  template <typename Dispatch>
2663  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
2664  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2665  VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
2666  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2667  {
2668  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2669  return static_cast<Result>( d.vkCreateShaderModule( m_device,
2670  reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
2671  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2672  reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
2673  }
2674 
2675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2676  template <typename Dispatch>
2678  Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
2680  Dispatch const & d ) const
2681  {
2682  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2683 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2684  VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
2685 # endif
2686 
2687  VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
2689  d.vkCreateShaderModule( m_device,
2690  reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2691  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2692  reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
2693  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
2694 
2695  return createResultValueType( result, shaderModule );
2696  }
2697 
2698 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2699  template <typename Dispatch>
2701  Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
2703  Dispatch const & d ) const
2704  {
2705  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2707  VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" );
2708 # endif
2709 
2710  VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
2712  d.vkCreateShaderModule( m_device,
2713  reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2714  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2715  reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
2716  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
2717 
2718  return createResultValueType(
2719  result, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2720  }
2721 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2722 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2723 
2724  template <typename Dispatch>
2725  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
2726  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2727  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2728  {
2729  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2730  d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2731  }
2732 
2733 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2734  template <typename Dispatch>
2735  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
2737  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2738  {
2739  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2740 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2741  VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
2742 # endif
2743 
2744  d.vkDestroyShaderModule( m_device,
2745  static_cast<VkShaderModule>( shaderModule ),
2746  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2747  }
2748 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2749 
2750  template <typename Dispatch>
2751  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
2752  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2753  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2754  {
2755  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2756  d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2757  }
2758 
2759 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2760  template <typename Dispatch>
2761  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
2763  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2764  {
2765  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2767  VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" );
2768 # endif
2769 
2770  d.vkDestroyShaderModule( m_device,
2771  static_cast<VkShaderModule>( shaderModule ),
2772  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2773  }
2774 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2775 
2776  template <typename Dispatch>
2777  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
2778  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2779  VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
2780  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2781  {
2782  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2783  return static_cast<Result>( d.vkCreatePipelineCache( m_device,
2784  reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
2785  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2786  reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
2787  }
2788 
2789 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2790  template <typename Dispatch>
2792  Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
2794  Dispatch const & d ) const
2795  {
2796  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2797 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2798  VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
2799 # endif
2800 
2801  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
2803  d.vkCreatePipelineCache( m_device,
2804  reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2805  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2806  reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
2807  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
2808 
2809  return createResultValueType( result, pipelineCache );
2810  }
2811 
2812 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2813  template <typename Dispatch>
2815  Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
2817  Dispatch const & d ) const
2818  {
2819  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2820 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2821  VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" );
2822 # endif
2823 
2824  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
2826  d.vkCreatePipelineCache( m_device,
2827  reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2828  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2829  reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
2830  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
2831 
2832  return createResultValueType(
2833  result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2834  }
2835 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2836 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2837 
2838  template <typename Dispatch>
2839  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
2840  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2841  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2842  {
2843  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2844  d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2845  }
2846 
2847 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2848  template <typename Dispatch>
2849  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
2851  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2852  {
2853  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2854 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2855  VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
2856 # endif
2857 
2858  d.vkDestroyPipelineCache( m_device,
2859  static_cast<VkPipelineCache>( pipelineCache ),
2860  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2861  }
2862 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2863 
2864  template <typename Dispatch>
2865  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
2866  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2867  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2868  {
2869  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2870  d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2871  }
2872 
2873 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2874  template <typename Dispatch>
2875  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
2877  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2878  {
2879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2880 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2881  VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" );
2882 # endif
2883 
2884  d.vkDestroyPipelineCache( m_device,
2885  static_cast<VkPipelineCache>( pipelineCache ),
2886  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2887  }
2888 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2889 
2890  template <typename Dispatch>
2891  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
2892  size_t * pDataSize,
2893  void * pData,
2894  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2895  {
2896  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2897  return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
2898  }
2899 
2900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2901  template <typename Uint8_tAllocator, typename Dispatch>
2903  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
2904  {
2905  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2906 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2907  VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
2908 # endif
2909 
2910  std::vector<uint8_t, Uint8_tAllocator> data;
2911  size_t dataSize;
2913  do
2914  {
2915  result =
2916  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
2917  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
2918  {
2919  data.resize( dataSize );
2920  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
2921  d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
2922  }
2923  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
2924  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
2925  VULKAN_HPP_ASSERT( dataSize <= data.size() );
2926  if ( dataSize < data.size() )
2927  {
2928  data.resize( dataSize );
2929  }
2930  return createResultValueType( result, data );
2931  }
2932 
2933  template <typename Uint8_tAllocator,
2934  typename Dispatch,
2935  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
2937  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
2938  {
2939  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2940 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2941  VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
2942 # endif
2943 
2944  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
2945  size_t dataSize;
2947  do
2948  {
2949  result =
2950  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
2951  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
2952  {
2953  data.resize( dataSize );
2954  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
2955  d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
2956  }
2957  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
2958  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
2959  VULKAN_HPP_ASSERT( dataSize <= data.size() );
2960  if ( dataSize < data.size() )
2961  {
2962  data.resize( dataSize );
2963  }
2964  return createResultValueType( result, data );
2965  }
2966 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2967 
2968  template <typename Dispatch>
2969  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
2970  uint32_t srcCacheCount,
2971  const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
2972  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2973  {
2974  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2975  return static_cast<Result>(
2976  d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
2977  }
2978 
2979 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2980  template <typename Dispatch>
2982  Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
2983  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
2984  Dispatch const & d ) const
2985  {
2986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2987 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
2988  VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" );
2989 # endif
2990 
2991  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergePipelineCaches(
2992  m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
2993  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
2994 
2995  return createResultValueType( result );
2996  }
2997 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2998 
2999  template <typename Dispatch>
3000  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3001  uint32_t createInfoCount,
3002  const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
3003  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3004  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
3005  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3006  {
3007  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3008  return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
3009  static_cast<VkPipelineCache>( pipelineCache ),
3010  createInfoCount,
3011  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
3012  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3013  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
3014  }
3015 
3016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3017  template <typename PipelineAllocator, typename Dispatch>
3019  Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3020  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
3022  Dispatch const & d ) const
3023  {
3024  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3025 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3026  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3027 # endif
3028 
3029  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
3030  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3031  m_device,
3032  static_cast<VkPipelineCache>( pipelineCache ),
3033  createInfos.size(),
3034  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
3035  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3036  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3037  resultCheck( result,
3038  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
3039  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3040 
3042  }
3043 
3044  template <typename PipelineAllocator,
3045  typename Dispatch,
3046  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
3048  Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3049  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
3051  PipelineAllocator & pipelineAllocator,
3052  Dispatch const & d ) const
3053  {
3054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3055 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3056  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3057 # endif
3058 
3059  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
3060  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3061  m_device,
3062  static_cast<VkPipelineCache>( pipelineCache ),
3063  createInfos.size(),
3064  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
3065  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3066  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3067  resultCheck( result,
3068  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
3069  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3070 
3072  }
3073 
3074  template <typename Dispatch>
3076  Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3077  const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
3079  Dispatch const & d ) const
3080  {
3081  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3082 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3083  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3084 # endif
3085 
3086  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
3087  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3088  m_device,
3089  static_cast<VkPipelineCache>( pipelineCache ),
3090  1,
3091  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
3092  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3093  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
3094  resultCheck( result,
3095  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
3096  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3097 
3098  return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
3099  }
3100 
3101 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3102  template <typename Dispatch, typename PipelineAllocator>
3104  Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3105  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
3107  Dispatch const & d ) const
3108  {
3109  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3110 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3111  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3112 # endif
3113 
3114  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
3115  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3116  m_device,
3117  static_cast<VkPipelineCache>( pipelineCache ),
3118  createInfos.size(),
3119  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
3120  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3121  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3122  resultCheck( result,
3123  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
3124  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3125  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
3126  uniquePipelines.reserve( createInfos.size() );
3127  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3128  for ( auto const & pipeline : pipelines )
3129  {
3130  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3131  }
3133  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3134  }
3135 
3136  template <
3137  typename Dispatch,
3138  typename PipelineAllocator,
3139  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
3141  Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3142  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
3144  PipelineAllocator & pipelineAllocator,
3145  Dispatch const & d ) const
3146  {
3147  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3148 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3149  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3150 # endif
3151 
3152  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
3153  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3154  m_device,
3155  static_cast<VkPipelineCache>( pipelineCache ),
3156  createInfos.size(),
3157  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
3158  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3159  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3160  resultCheck( result,
3161  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
3162  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3163  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
3164  uniquePipelines.reserve( createInfos.size() );
3165  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3166  for ( auto const & pipeline : pipelines )
3167  {
3168  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3169  }
3171  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3172  }
3173 
3174  template <typename Dispatch>
3176  Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3177  const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
3179  Dispatch const & d ) const
3180  {
3181  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3182 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3183  VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" );
3184 # endif
3185 
3186  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
3187  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines(
3188  m_device,
3189  static_cast<VkPipelineCache>( pipelineCache ),
3190  1,
3191  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
3192  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3193  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
3194  resultCheck( result,
3195  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
3196  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3197 
3199  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3201  }
3202 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3203 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3204 
3205  template <typename Dispatch>
3206  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3207  uint32_t createInfoCount,
3208  const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
3209  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3210  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
3211  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3212  {
3213  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3214  return static_cast<Result>( d.vkCreateComputePipelines( m_device,
3215  static_cast<VkPipelineCache>( pipelineCache ),
3216  createInfoCount,
3217  reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
3218  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3219  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
3220  }
3221 
3222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3223  template <typename PipelineAllocator, typename Dispatch>
3225  Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3226  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
3228  Dispatch const & d ) const
3229  {
3230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3231 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3232  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3233 # endif
3234 
3235  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
3236  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3237  m_device,
3238  static_cast<VkPipelineCache>( pipelineCache ),
3239  createInfos.size(),
3240  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
3241  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3242  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3243  resultCheck( result,
3244  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
3245  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3246 
3248  }
3249 
3250  template <typename PipelineAllocator,
3251  typename Dispatch,
3252  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
3254  Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3255  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
3257  PipelineAllocator & pipelineAllocator,
3258  Dispatch const & d ) const
3259  {
3260  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3261 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3262  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3263 # endif
3264 
3265  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
3266  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3267  m_device,
3268  static_cast<VkPipelineCache>( pipelineCache ),
3269  createInfos.size(),
3270  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
3271  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3272  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3273  resultCheck( result,
3274  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
3275  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3276 
3278  }
3279 
3280  template <typename Dispatch>
3282  Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3283  const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
3285  Dispatch const & d ) const
3286  {
3287  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3288 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3289  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3290 # endif
3291 
3292  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
3293  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3294  m_device,
3295  static_cast<VkPipelineCache>( pipelineCache ),
3296  1,
3297  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
3298  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3299  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
3300  resultCheck( result,
3301  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
3302  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3303 
3304  return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
3305  }
3306 
3307 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3308  template <typename Dispatch, typename PipelineAllocator>
3310  Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3311  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
3313  Dispatch const & d ) const
3314  {
3315  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3316 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3317  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3318 # endif
3319 
3320  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
3321  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3322  m_device,
3323  static_cast<VkPipelineCache>( pipelineCache ),
3324  createInfos.size(),
3325  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
3326  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3327  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3328  resultCheck( result,
3329  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
3330  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3331  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
3332  uniquePipelines.reserve( createInfos.size() );
3333  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3334  for ( auto const & pipeline : pipelines )
3335  {
3336  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3337  }
3339  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3340  }
3341 
3342  template <
3343  typename Dispatch,
3344  typename PipelineAllocator,
3345  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
3347  Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3348  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
3350  PipelineAllocator & pipelineAllocator,
3351  Dispatch const & d ) const
3352  {
3353  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3354 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3355  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3356 # endif
3357 
3358  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
3359  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3360  m_device,
3361  static_cast<VkPipelineCache>( pipelineCache ),
3362  createInfos.size(),
3363  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
3364  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3365  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
3366  resultCheck( result,
3367  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
3368  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3369  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
3370  uniquePipelines.reserve( createInfos.size() );
3371  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3372  for ( auto const & pipeline : pipelines )
3373  {
3374  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3375  }
3377  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3378  }
3379 
3380  template <typename Dispatch>
3382  Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
3383  const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
3385  Dispatch const & d ) const
3386  {
3387  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3388 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3389  VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" );
3390 # endif
3391 
3392  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
3393  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines(
3394  m_device,
3395  static_cast<VkPipelineCache>( pipelineCache ),
3396  1,
3397  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
3398  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3399  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
3400  resultCheck( result,
3401  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
3402  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
3403 
3405  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3407  }
3408 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3409 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3410 
3411  template <typename Dispatch>
3412  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
3413  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3414  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3415  {
3416  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3417  d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3418  }
3419 
3420 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3421  template <typename Dispatch>
3422  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
3424  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3425  {
3426  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3427 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3428  VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" );
3429 # endif
3430 
3431  d.vkDestroyPipeline( m_device,
3432  static_cast<VkPipeline>( pipeline ),
3433  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3434  }
3435 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3436 
3437  template <typename Dispatch>
3438  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
3439  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3440  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3441  {
3442  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3443  d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3444  }
3445 
3446 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3447  template <typename Dispatch>
3448  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
3450  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3451  {
3452  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3453 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3454  VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" );
3455 # endif
3456 
3457  d.vkDestroyPipeline( m_device,
3458  static_cast<VkPipeline>( pipeline ),
3459  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3460  }
3461 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3462 
3463  template <typename Dispatch>
3464  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
3465  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3466  VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
3467  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3468  {
3469  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3470  return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
3471  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
3472  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3473  reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
3474  }
3475 
3476 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3477  template <typename Dispatch>
3479  Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
3481  Dispatch const & d ) const
3482  {
3483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3484 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3485  VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" );
3486 # endif
3487 
3488  VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
3490  d.vkCreatePipelineLayout( m_device,
3491  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3492  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3493  reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
3494  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
3495 
3496  return createResultValueType( result, pipelineLayout );
3497  }
3498 
3499 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3500  template <typename Dispatch>
3502  Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
3504  Dispatch const & d ) const
3505  {
3506  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3507 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3508  VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" );
3509 # endif
3510 
3511  VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
3513  d.vkCreatePipelineLayout( m_device,
3514  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3515  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3516  reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
3517  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
3518 
3519  return createResultValueType(
3520  result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
3521  }
3522 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3523 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3524 
3525  template <typename Dispatch>
3526  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
3527  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3528  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3529  {
3530  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3531  d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3532  }
3533 
3534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3535  template <typename Dispatch>
3536  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
3538  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3539  {
3540  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3541 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3542  VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" );
3543 # endif
3544 
3545  d.vkDestroyPipelineLayout( m_device,
3546  static_cast<VkPipelineLayout>( pipelineLayout ),
3547  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3548  }
3549 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3550 
3551  template <typename Dispatch>
3552  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
3553  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3554  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3555  {
3556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3557  d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3558  }
3559 
3560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3561  template <typename Dispatch>
3562  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
3564  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3565  {
3566  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3567 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3568  VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" );
3569 # endif
3570 
3571  d.vkDestroyPipelineLayout( m_device,
3572  static_cast<VkPipelineLayout>( pipelineLayout ),
3573  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3574  }
3575 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3576 
3577  template <typename Dispatch>
3578  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
3579  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3580  VULKAN_HPP_NAMESPACE::Sampler * pSampler,
3581  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3582  {
3583  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3584  return static_cast<Result>( d.vkCreateSampler( m_device,
3585  reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
3586  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3587  reinterpret_cast<VkSampler *>( pSampler ) ) );
3588  }
3589 
3590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3591  template <typename Dispatch>
3593  const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
3594  {
3595  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3596 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3597  VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" );
3598 # endif
3599 
3600  VULKAN_HPP_NAMESPACE::Sampler sampler;
3602  d.vkCreateSampler( m_device,
3603  reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3604  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3605  reinterpret_cast<VkSampler *>( &sampler ) ) );
3606  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
3607 
3608  return createResultValueType( result, sampler );
3609  }
3610 
3611 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3612  template <typename Dispatch>
3614  const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
3615  {
3616  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3617 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3618  VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" );
3619 # endif
3620 
3621  VULKAN_HPP_NAMESPACE::Sampler sampler;
3623  d.vkCreateSampler( m_device,
3624  reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3625  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3626  reinterpret_cast<VkSampler *>( &sampler ) ) );
3627  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
3628 
3629  return createResultValueType( result,
3631  }
3632 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3634 
3635  template <typename Dispatch>
3636  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
3637  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3638  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3639  {
3640  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3641  d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3642  }
3643 
3644 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3645  template <typename Dispatch>
3646  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
3648  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3649  {
3650  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3651 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3652  VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" );
3653 # endif
3654 
3655  d.vkDestroySampler( m_device,
3656  static_cast<VkSampler>( sampler ),
3657  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3658  }
3659 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3660 
3661  template <typename Dispatch>
3662  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
3663  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3664  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3665  {
3666  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3667  d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3668  }
3669 
3670 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3671  template <typename Dispatch>
3672  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
3674  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3675  {
3676  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3677 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3678  VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" );
3679 # endif
3680 
3681  d.vkDestroySampler( m_device,
3682  static_cast<VkSampler>( sampler ),
3683  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3684  }
3685 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3686 
3687  template <typename Dispatch>
3688  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
3689  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3690  VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
3691  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3692  {
3693  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3694  return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
3695  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
3696  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3697  reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
3698  }
3699 
3700 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3701  template <typename Dispatch>
3703  Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
3705  Dispatch const & d ) const
3706  {
3707  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3708 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3709  VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" );
3710 # endif
3711 
3712  VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
3713  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout(
3714  m_device,
3715  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3716  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3717  reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
3718  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
3719 
3720  return createResultValueType( result, setLayout );
3721  }
3722 
3723 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3724  template <typename Dispatch>
3726  Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
3728  Dispatch const & d ) const
3729  {
3730  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3731 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3732  VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" );
3733 # endif
3734 
3735  VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
3736  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout(
3737  m_device,
3738  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3739  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3740  reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
3741  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
3742 
3743  return createResultValueType(
3745  }
3746 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3747 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3748 
3749  template <typename Dispatch>
3750  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
3751  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3752  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3753  {
3754  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3755  d.vkDestroyDescriptorSetLayout(
3756  m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3757  }
3758 
3759 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3760  template <typename Dispatch>
3761  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
3763  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3764  {
3765  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3766 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3767  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" );
3768 # endif
3769 
3770  d.vkDestroyDescriptorSetLayout(
3771  m_device,
3772  static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3773  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3774  }
3775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3776 
3777  template <typename Dispatch>
3778  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
3779  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3780  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3781  {
3782  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3783  d.vkDestroyDescriptorSetLayout(
3784  m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3785  }
3786 
3787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3788  template <typename Dispatch>
3789  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
3791  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3792  {
3793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3795  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" );
3796 # endif
3797 
3798  d.vkDestroyDescriptorSetLayout(
3799  m_device,
3800  static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3801  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3802  }
3803 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3804 
3805  template <typename Dispatch>
3806  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
3807  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3808  VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
3809  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3810  {
3811  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3812  return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
3813  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
3814  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3815  reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
3816  }
3817 
3818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3819  template <typename Dispatch>
3821  Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
3823  Dispatch const & d ) const
3824  {
3825  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3826 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3827  VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" );
3828 # endif
3829 
3830  VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
3832  d.vkCreateDescriptorPool( m_device,
3833  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3834  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3835  reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
3836  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
3837 
3838  return createResultValueType( result, descriptorPool );
3839  }
3840 
3841 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3842  template <typename Dispatch>
3844  Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
3846  Dispatch const & d ) const
3847  {
3848  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3849 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3850  VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" );
3851 # endif
3852 
3853  VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
3855  d.vkCreateDescriptorPool( m_device,
3856  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3857  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3858  reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
3859  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
3860 
3861  return createResultValueType(
3862  result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
3863  }
3864 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3865 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3866 
3867  template <typename Dispatch>
3868  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3869  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3870  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3871  {
3872  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3873  d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3874  }
3875 
3876 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3877  template <typename Dispatch>
3878  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3880  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3881  {
3882  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3883 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3884  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" );
3885 # endif
3886 
3887  d.vkDestroyDescriptorPool( m_device,
3888  static_cast<VkDescriptorPool>( descriptorPool ),
3889  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3890  }
3891 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3892 
3893  template <typename Dispatch>
3894  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3895  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3896  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3897  {
3898  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3899  d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3900  }
3901 
3902 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3903  template <typename Dispatch>
3904  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3906  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3907  {
3908  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3909 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3910  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" );
3911 # endif
3912 
3913  d.vkDestroyDescriptorPool( m_device,
3914  static_cast<VkDescriptorPool>( descriptorPool ),
3915  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3916  }
3917 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3918 
3919 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
3920  template <typename Dispatch>
3921  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3923  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3924  {
3925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3926  return static_cast<Result>(
3927  d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
3928  }
3929 #else
3930  template <typename Dispatch>
3931  VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
3933  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3934  {
3935  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3936 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3937  VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" );
3938 # endif
3939 
3940  d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
3941  }
3942 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3943 
3944  template <typename Dispatch>
3945  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
3946  VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
3947  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3948  {
3949  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3950  return static_cast<Result>( d.vkAllocateDescriptorSets(
3951  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
3952  }
3953 
3954 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3955  template <typename DescriptorSetAllocator, typename Dispatch>
3957  Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
3958  {
3959  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3960 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3961  VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" );
3962 # endif
3963 
3964  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
3965  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets(
3966  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
3967  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
3968 
3969  return createResultValueType( result, descriptorSets );
3970  }
3971 
3972  template <typename DescriptorSetAllocator,
3973  typename Dispatch,
3974  typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type>
3976  Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
3977  DescriptorSetAllocator & descriptorSetAllocator,
3978  Dispatch const & d ) const
3979  {
3980  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3981 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
3982  VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" );
3983 # endif
3984 
3985  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
3986  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets(
3987  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
3988  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
3989 
3990  return createResultValueType( result, descriptorSets );
3991  }
3992 
3993 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3994  template <typename Dispatch, typename DescriptorSetAllocator>
3997  Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
3998  {
3999  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4000 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4001  VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" );
4002 # endif
4003 
4004  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
4005  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets(
4006  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
4007  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
4008  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
4009  uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
4010  PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
4011  for ( auto const & descriptorSet : descriptorSets )
4012  {
4013  uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
4014  }
4015  return createResultValueType( result, std::move( uniqueDescriptorSets ) );
4016  }
4017 
4018  template <
4019  typename Dispatch,
4020  typename DescriptorSetAllocator,
4021  typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value,
4022  int>::type>
4025  Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
4026  DescriptorSetAllocator & descriptorSetAllocator,
4027  Dispatch const & d ) const
4028  {
4029  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4030 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4031  VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" );
4032 # endif
4033 
4034  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
4035  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets(
4036  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
4037  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
4038  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
4039  uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
4040  PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
4041  for ( auto const & descriptorSet : descriptorSets )
4042  {
4043  uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
4044  }
4045  return createResultValueType( result, std::move( uniqueDescriptorSets ) );
4046  }
4047 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4048 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4049 
4050  template <typename Dispatch>
4051  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
4052  uint32_t descriptorSetCount,
4053  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
4054  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4055  {
4056  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4057  return static_cast<Result>( d.vkFreeDescriptorSets(
4058  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
4059  }
4060 
4061 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4062  template <typename Dispatch>
4063  VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
4064  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
4065  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4066  {
4067  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4068 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4069  VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" );
4070 # endif
4071 
4072  d.vkFreeDescriptorSets(
4073  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
4074  }
4075 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4076 
4077  template <typename Dispatch>
4078  VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
4079  uint32_t descriptorSetCount,
4080  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
4081  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4082  {
4083  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4084  return static_cast<Result>( d.vkFreeDescriptorSets(
4085  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
4086  }
4087 
4088 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4089  template <typename Dispatch>
4090  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
4091  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
4092  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4093  {
4094  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4095 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4096  VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" );
4097 # endif
4098 
4099  d.vkFreeDescriptorSets(
4100  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
4101  }
4102 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4103 
4104  template <typename Dispatch>
4105  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
4106  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
4107  uint32_t descriptorCopyCount,
4108  const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
4109  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4110  {
4111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4112  d.vkUpdateDescriptorSets( m_device,
4113  descriptorWriteCount,
4114  reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
4115  descriptorCopyCount,
4116  reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
4117  }
4118 
4119 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4120  template <typename Dispatch>
4121  VULKAN_HPP_INLINE void
4122  Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
4123  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
4124  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4125  {
4126  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4127 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4128  VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" );
4129 # endif
4130 
4131  d.vkUpdateDescriptorSets( m_device,
4132  descriptorWrites.size(),
4133  reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
4134  descriptorCopies.size(),
4135  reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
4136  }
4137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4138 
4139  template <typename Dispatch>
4140  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
4141  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4142  VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
4143  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4144  {
4145  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4146  return static_cast<Result>( d.vkCreateFramebuffer( m_device,
4147  reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
4148  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
4149  reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
4150  }
4151 
4152 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4153  template <typename Dispatch>
4155  Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
4157  Dispatch const & d ) const
4158  {
4159  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4161  VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" );
4162 # endif
4163 
4164  VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
4166  d.vkCreateFramebuffer( m_device,
4167  reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
4168  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4169  reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
4170  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
4171 
4172  return createResultValueType( result, framebuffer );
4173  }
4174 
4175 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4176  template <typename Dispatch>
4178  Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
4180  Dispatch const & d ) const
4181  {
4182  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4183 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4184  VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" );
4185 # endif
4186 
4187  VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
4189  d.vkCreateFramebuffer( m_device,
4190  reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
4191  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4192  reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
4193  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
4194 
4195  return createResultValueType(
4196  result, UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4197  }
4198 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4199 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4200 
4201  template <typename Dispatch>
4202  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
4203  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4204  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4205  {
4206  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4207  d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4208  }
4209 
4210 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4211  template <typename Dispatch>
4212  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
4214  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4215  {
4216  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4218  VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" );
4219 # endif
4220 
4221  d.vkDestroyFramebuffer( m_device,
4222  static_cast<VkFramebuffer>( framebuffer ),
4223  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4224  }
4225 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4226 
4227  template <typename Dispatch>
4228  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
4229  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4230  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4231  {
4232  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4233  d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4234  }
4235 
4236 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4237  template <typename Dispatch>
4238  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
4240  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4241  {
4242  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4243 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4244  VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" );
4245 # endif
4246 
4247  d.vkDestroyFramebuffer( m_device,
4248  static_cast<VkFramebuffer>( framebuffer ),
4249  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4250  }
4251 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4252 
4253  template <typename Dispatch>
4254  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
4255  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4256  VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
4257  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4258  {
4259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4260  return static_cast<Result>( d.vkCreateRenderPass( m_device,
4261  reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
4262  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
4263  reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
4264  }
4265 
4266 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4267  template <typename Dispatch>
4269  Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
4271  Dispatch const & d ) const
4272  {
4273  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4274 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4275  VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" );
4276 # endif
4277 
4278  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
4280  d.vkCreateRenderPass( m_device,
4281  reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
4282  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4283  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
4284  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
4285 
4286  return createResultValueType( result, renderPass );
4287  }
4288 
4289 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4290  template <typename Dispatch>
4292  Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
4294  Dispatch const & d ) const
4295  {
4296  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4297 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4298  VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" );
4299 # endif
4300 
4301  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
4303  d.vkCreateRenderPass( m_device,
4304  reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
4305  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4306  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
4307  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
4308 
4309  return createResultValueType(
4310  result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4311  }
4312 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4313 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4314 
4315  template <typename Dispatch>
4316  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4317  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4318  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4319  {
4320  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4321  d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4322  }
4323 
4324 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4325  template <typename Dispatch>
4326  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4328  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4329  {
4330  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4332  VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" );
4333 # endif
4334 
4335  d.vkDestroyRenderPass( m_device,
4336  static_cast<VkRenderPass>( renderPass ),
4337  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4338  }
4339 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4340 
4341  template <typename Dispatch>
4342  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4343  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4344  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4345  {
4346  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4347  d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4348  }
4349 
4350 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4351  template <typename Dispatch>
4352  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4354  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4355  {
4356  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4357 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4358  VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" );
4359 # endif
4360 
4361  d.vkDestroyRenderPass( m_device,
4362  static_cast<VkRenderPass>( renderPass ),
4363  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4364  }
4365 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4366 
4367  template <typename Dispatch>
4368  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4369  VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
4370  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4371  {
4372  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4373  d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
4374  }
4375 
4376 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4377  template <typename Dispatch>
4378  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
4379  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4380  {
4381  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4382 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4383  VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" );
4384 # endif
4385 
4386  VULKAN_HPP_NAMESPACE::Extent2D granularity;
4387  d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
4388 
4389  return granularity;
4390  }
4391 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4392 
4393  template <typename Dispatch>
4394  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
4395  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4396  VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
4397  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4398  {
4399  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4400  return static_cast<Result>( d.vkCreateCommandPool( m_device,
4401  reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
4402  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
4403  reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
4404  }
4405 
4406 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4407  template <typename Dispatch>
4409  Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
4411  Dispatch const & d ) const
4412  {
4413  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4414 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4415  VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" );
4416 # endif
4417 
4418  VULKAN_HPP_NAMESPACE::CommandPool commandPool;
4420  d.vkCreateCommandPool( m_device,
4421  reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
4422  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4423  reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
4424  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
4425 
4426  return createResultValueType( result, commandPool );
4427  }
4428 
4429 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4430  template <typename Dispatch>
4432  Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
4434  Dispatch const & d ) const
4435  {
4436  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4437 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4438  VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" );
4439 # endif
4440 
4441  VULKAN_HPP_NAMESPACE::CommandPool commandPool;
4443  d.vkCreateCommandPool( m_device,
4444  reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
4445  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4446  reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
4447  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
4448 
4449  return createResultValueType(
4450  result, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4451  }
4452 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4453 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4454 
4455  template <typename Dispatch>
4456  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4457  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4458  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4459  {
4460  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4461  d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4462  }
4463 
4464 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4465  template <typename Dispatch>
4466  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4468  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4469  {
4470  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4471 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4472  VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" );
4473 # endif
4474 
4475  d.vkDestroyCommandPool( m_device,
4476  static_cast<VkCommandPool>( commandPool ),
4477  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4478  }
4479 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4480 
4481  template <typename Dispatch>
4482  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4483  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4484  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4485  {
4486  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4487  d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4488  }
4489 
4490 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4491  template <typename Dispatch>
4492  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4494  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4495  {
4496  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4497 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4498  VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" );
4499 # endif
4500 
4501  d.vkDestroyCommandPool( m_device,
4502  static_cast<VkCommandPool>( commandPool ),
4503  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4504  }
4505 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4506 
4507 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4508  template <typename Dispatch>
4509  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4511  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4512  {
4513  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4514  return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
4515  }
4516 #else
4517  template <typename Dispatch>
4518  VULKAN_HPP_INLINE typename ResultValueType<void>::type
4519  Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
4520  {
4521  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4522 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4523  VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" );
4524 # endif
4525 
4527  d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
4528  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
4529 
4530  return createResultValueType( result );
4531  }
4532 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4533 
4534  template <typename Dispatch>
4535  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
4536  VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4537  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4538  {
4539  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4540  return static_cast<Result>( d.vkAllocateCommandBuffers(
4541  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
4542  }
4543 
4544 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4545  template <typename CommandBufferAllocator, typename Dispatch>
4547  Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
4548  {
4549  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4550 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4551  VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
4552 # endif
4553 
4554  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
4555  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
4556  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
4557  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
4558 
4559  return createResultValueType( result, commandBuffers );
4560  }
4561 
4562  template <typename CommandBufferAllocator,
4563  typename Dispatch,
4564  typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type>
4566  Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
4567  CommandBufferAllocator & commandBufferAllocator,
4568  Dispatch const & d ) const
4569  {
4570  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4571 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4572  VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
4573 # endif
4574 
4575  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
4576  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
4577  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
4578  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
4579 
4580  return createResultValueType( result, commandBuffers );
4581  }
4582 
4583 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4584  template <typename Dispatch, typename CommandBufferAllocator>
4587  Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
4588  {
4589  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4590 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4591  VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
4592 # endif
4593 
4594  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
4595  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
4596  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
4597  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
4598  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
4599  uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
4600  PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
4601  for ( auto const & commandBuffer : commandBuffers )
4602  {
4603  uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
4604  }
4605  return createResultValueType( result, std::move( uniqueCommandBuffers ) );
4606  }
4607 
4608  template <
4609  typename Dispatch,
4610  typename CommandBufferAllocator,
4611  typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value,
4612  int>::type>
4615  Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
4616  CommandBufferAllocator & commandBufferAllocator,
4617  Dispatch const & d ) const
4618  {
4619  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4620 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4621  VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" );
4622 # endif
4623 
4624  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
4625  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers(
4626  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
4627  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
4628  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
4629  uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
4630  PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
4631  for ( auto const & commandBuffer : commandBuffers )
4632  {
4633  uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
4634  }
4635  return createResultValueType( result, std::move( uniqueCommandBuffers ) );
4636  }
4637 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4638 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4639 
4640  template <typename Dispatch>
4641  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4642  uint32_t commandBufferCount,
4643  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4644  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4645  {
4646  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4647  d.vkFreeCommandBuffers(
4648  m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4649  }
4650 
4651 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4652  template <typename Dispatch>
4653  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4654  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
4655  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4656  {
4657  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4659  VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
4660 # endif
4661 
4662  d.vkFreeCommandBuffers(
4663  m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
4664  }
4665 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4666 
4667  template <typename Dispatch>
4668  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4669  uint32_t commandBufferCount,
4670  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4671  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4672  {
4673  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4674  d.vkFreeCommandBuffers(
4675  m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4676  }
4677 
4678 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4679  template <typename Dispatch>
4680  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
4681  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
4682  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4683  {
4684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4685 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4686  VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" );
4687 # endif
4688 
4689  d.vkFreeCommandBuffers(
4690  m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
4691  }
4692 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4693 
4694  template <typename Dispatch>
4695  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
4696  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4697  {
4698  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4699  return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
4700  }
4701 
4702 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4703  template <typename Dispatch>
4705  CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
4706  {
4707  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4708 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4709  VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" );
4710 # endif
4711 
4713  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
4714  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
4715 
4716  return createResultValueType( result );
4717  }
4718 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4719 
4720 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4721  template <typename Dispatch>
4722  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4723  {
4724  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4725  return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
4726  }
4727 #else
4728  template <typename Dispatch>
4729  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
4730  {
4731  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4732 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4733  VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" );
4734 # endif
4735 
4736  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
4737  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
4738 
4739  return createResultValueType( result );
4740  }
4741 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4742 
4743 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4744  template <typename Dispatch>
4746  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4747  {
4748  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4749  return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
4750  }
4751 #else
4752  template <typename Dispatch>
4753  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
4754  {
4755  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4756 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4757  VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" );
4758 # endif
4759 
4761  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
4762  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
4763 
4764  return createResultValueType( result );
4765  }
4766 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4767 
4768  template <typename Dispatch>
4769  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
4770  VULKAN_HPP_NAMESPACE::Pipeline pipeline,
4771  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4772  {
4773  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4774  d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
4775  }
4776 
4777  template <typename Dispatch>
4778  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
4779  uint32_t viewportCount,
4780  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
4781  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4782  {
4783  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4784  d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
4785  }
4786 
4787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4788  template <typename Dispatch>
4789  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
4790  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
4791  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4792  {
4793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4794 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4795  VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
4796 # endif
4797 
4798  d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
4799  }
4800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4801 
4802  template <typename Dispatch>
4803  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
4804  uint32_t scissorCount,
4805  const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
4806  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4807  {
4808  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4809  d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
4810  }
4811 
4812 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4813  template <typename Dispatch>
4814  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
4815  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
4816  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4817  {
4818  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4819 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4820  VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
4821 # endif
4822 
4823  d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
4824  }
4825 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4826 
4827  template <typename Dispatch>
4828  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4829  {
4830  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4831  d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
4832  }
4833 
4834  template <typename Dispatch>
4835  VULKAN_HPP_INLINE void
4836  CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4837  {
4838  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4839  d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
4840  }
4841 
4842  template <typename Dispatch>
4843  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4844  {
4845  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4846  d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
4847  }
4848 
4849  template <typename Dispatch>
4850  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4851  {
4852  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4853  d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
4854  }
4855 
4856  template <typename Dispatch>
4857  VULKAN_HPP_INLINE void
4858  CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4859  {
4860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4861  d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
4862  }
4863 
4864  template <typename Dispatch>
4865  VULKAN_HPP_INLINE void
4866  CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4867  {
4868  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4869  d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
4870  }
4871 
4872  template <typename Dispatch>
4873  VULKAN_HPP_INLINE void
4874  CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4875  {
4876  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4877  d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
4878  }
4879 
4880  template <typename Dispatch>
4881  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
4882  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
4883  uint32_t firstSet,
4884  uint32_t descriptorSetCount,
4885  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
4886  uint32_t dynamicOffsetCount,
4887  const uint32_t * pDynamicOffsets,
4888  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4889  {
4890  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4891  d.vkCmdBindDescriptorSets( m_commandBuffer,
4892  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4893  static_cast<VkPipelineLayout>( layout ),
4894  firstSet,
4895  descriptorSetCount,
4896  reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
4897  dynamicOffsetCount,
4898  pDynamicOffsets );
4899  }
4900 
4901 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4902  template <typename Dispatch>
4903  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
4904  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
4905  uint32_t firstSet,
4906  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
4907  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
4908  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4909  {
4910  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4911 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4912  VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" );
4913 # endif
4914 
4915  d.vkCmdBindDescriptorSets( m_commandBuffer,
4916  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4917  static_cast<VkPipelineLayout>( layout ),
4918  firstSet,
4919  descriptorSets.size(),
4920  reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
4921  dynamicOffsets.size(),
4922  dynamicOffsets.data() );
4923  }
4924 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4925 
4926  template <typename Dispatch>
4927  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
4930  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4931  {
4932  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4933  d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
4934  }
4935 
4936  template <typename Dispatch>
4937  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
4938  uint32_t bindingCount,
4939  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
4940  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
4941  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4942  {
4943  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4944  d.vkCmdBindVertexBuffers(
4945  m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
4946  }
4947 
4948 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4949  template <typename Dispatch>
4950  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
4951  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
4952  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
4953  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
4954  {
4955  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4956 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
4957  VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" );
4958 # endif
4959 # ifdef VULKAN_HPP_NO_EXCEPTIONS
4960  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
4961 # else
4962  if ( buffers.size() != offsets.size() )
4963  {
4964  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
4965  }
4966 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
4967 
4968  d.vkCmdBindVertexBuffers( m_commandBuffer,
4969  firstBinding,
4970  buffers.size(),
4971  reinterpret_cast<const VkBuffer *>( buffers.data() ),
4972  reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
4973  }
4974 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4975 
4976  template <typename Dispatch>
4977  VULKAN_HPP_INLINE void CommandBuffer::draw(
4978  uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4979  {
4980  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4981  d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
4982  }
4983 
4984  template <typename Dispatch>
4985  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
4986  uint32_t instanceCount,
4987  uint32_t firstIndex,
4988  int32_t vertexOffset,
4989  uint32_t firstInstance,
4990  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4991  {
4992  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4993  d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
4994  }
4995 
4996  template <typename Dispatch>
4997  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
4999  uint32_t drawCount,
5000  uint32_t stride,
5001  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5002  {
5003  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5004  d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
5005  }
5006 
5007  template <typename Dispatch>
5008  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
5010  uint32_t drawCount,
5011  uint32_t stride,
5012  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5013  {
5014  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5015  d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
5016  }
5017 
5018  template <typename Dispatch>
5019  VULKAN_HPP_INLINE void
5020  CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5021  {
5022  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5023  d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
5024  }
5025 
5026  template <typename Dispatch>
5027  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
5029  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5030  {
5031  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5032  d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
5033  }
5034 
5035  template <typename Dispatch>
5036  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
5037  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5038  uint32_t regionCount,
5039  const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
5040  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5041  {
5042  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5043  d.vkCmdCopyBuffer( m_commandBuffer,
5044  static_cast<VkBuffer>( srcBuffer ),
5045  static_cast<VkBuffer>( dstBuffer ),
5046  regionCount,
5047  reinterpret_cast<const VkBufferCopy *>( pRegions ) );
5048  }
5049 
5050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5051  template <typename Dispatch>
5052  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
5053  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5054  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
5055  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5056  {
5057  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5058 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5059  VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" );
5060 # endif
5061 
5062  d.vkCmdCopyBuffer( m_commandBuffer,
5063  static_cast<VkBuffer>( srcBuffer ),
5064  static_cast<VkBuffer>( dstBuffer ),
5065  regions.size(),
5066  reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
5067  }
5068 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5069 
5070  template <typename Dispatch>
5071  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5072  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5073  VULKAN_HPP_NAMESPACE::Image dstImage,
5074  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5075  uint32_t regionCount,
5076  const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
5077  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5078  {
5079  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5080  d.vkCmdCopyImage( m_commandBuffer,
5081  static_cast<VkImage>( srcImage ),
5082  static_cast<VkImageLayout>( srcImageLayout ),
5083  static_cast<VkImage>( dstImage ),
5084  static_cast<VkImageLayout>( dstImageLayout ),
5085  regionCount,
5086  reinterpret_cast<const VkImageCopy *>( pRegions ) );
5087  }
5088 
5089 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5090  template <typename Dispatch>
5091  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5092  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5093  VULKAN_HPP_NAMESPACE::Image dstImage,
5094  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5095  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
5096  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5097  {
5098  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5099 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5100  VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" );
5101 # endif
5102 
5103  d.vkCmdCopyImage( m_commandBuffer,
5104  static_cast<VkImage>( srcImage ),
5105  static_cast<VkImageLayout>( srcImageLayout ),
5106  static_cast<VkImage>( dstImage ),
5107  static_cast<VkImageLayout>( dstImageLayout ),
5108  regions.size(),
5109  reinterpret_cast<const VkImageCopy *>( regions.data() ) );
5110  }
5111 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5112 
5113  template <typename Dispatch>
5114  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5115  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5116  VULKAN_HPP_NAMESPACE::Image dstImage,
5117  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5118  uint32_t regionCount,
5119  const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
5121  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5122  {
5123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5124  d.vkCmdBlitImage( m_commandBuffer,
5125  static_cast<VkImage>( srcImage ),
5126  static_cast<VkImageLayout>( srcImageLayout ),
5127  static_cast<VkImage>( dstImage ),
5128  static_cast<VkImageLayout>( dstImageLayout ),
5129  regionCount,
5130  reinterpret_cast<const VkImageBlit *>( pRegions ),
5131  static_cast<VkFilter>( filter ) );
5132  }
5133 
5134 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5135  template <typename Dispatch>
5136  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5137  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5138  VULKAN_HPP_NAMESPACE::Image dstImage,
5139  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5140  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
5142  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5143  {
5144  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5145 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5146  VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" );
5147 # endif
5148 
5149  d.vkCmdBlitImage( m_commandBuffer,
5150  static_cast<VkImage>( srcImage ),
5151  static_cast<VkImageLayout>( srcImageLayout ),
5152  static_cast<VkImage>( dstImage ),
5153  static_cast<VkImageLayout>( dstImageLayout ),
5154  regions.size(),
5155  reinterpret_cast<const VkImageBlit *>( regions.data() ),
5156  static_cast<VkFilter>( filter ) );
5157  }
5158 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5159 
5160  template <typename Dispatch>
5161  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
5162  VULKAN_HPP_NAMESPACE::Image dstImage,
5163  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5164  uint32_t regionCount,
5165  const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
5166  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5167  {
5168  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5169  d.vkCmdCopyBufferToImage( m_commandBuffer,
5170  static_cast<VkBuffer>( srcBuffer ),
5171  static_cast<VkImage>( dstImage ),
5172  static_cast<VkImageLayout>( dstImageLayout ),
5173  regionCount,
5174  reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
5175  }
5176 
5177 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5178  template <typename Dispatch>
5179  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
5180  VULKAN_HPP_NAMESPACE::Image dstImage,
5181  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5182  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
5183  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5184  {
5185  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5186 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5187  VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" );
5188 # endif
5189 
5190  d.vkCmdCopyBufferToImage( m_commandBuffer,
5191  static_cast<VkBuffer>( srcBuffer ),
5192  static_cast<VkImage>( dstImage ),
5193  static_cast<VkImageLayout>( dstImageLayout ),
5194  regions.size(),
5195  reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
5196  }
5197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5198 
5199  template <typename Dispatch>
5200  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
5201  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5202  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5203  uint32_t regionCount,
5204  const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
5205  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5206  {
5207  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5208  d.vkCmdCopyImageToBuffer( m_commandBuffer,
5209  static_cast<VkImage>( srcImage ),
5210  static_cast<VkImageLayout>( srcImageLayout ),
5211  static_cast<VkBuffer>( dstBuffer ),
5212  regionCount,
5213  reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
5214  }
5215 
5216 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5217  template <typename Dispatch>
5218  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
5219  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5220  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5221  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
5222  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5223  {
5224  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5225 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5226  VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" );
5227 # endif
5228 
5229  d.vkCmdCopyImageToBuffer( m_commandBuffer,
5230  static_cast<VkImage>( srcImage ),
5231  static_cast<VkImageLayout>( srcImageLayout ),
5232  static_cast<VkBuffer>( dstBuffer ),
5233  regions.size(),
5234  reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
5235  }
5236 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5237 
5238  template <typename Dispatch>
5239  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5242  const void * pData,
5243  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5244  {
5245  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5246  d.vkCmdUpdateBuffer(
5247  m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
5248  }
5249 
5250 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5251  template <typename DataType, typename Dispatch>
5252  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5254  VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,
5255  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5256  {
5257  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5259  VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" );
5260 # endif
5261 
5262  d.vkCmdUpdateBuffer( m_commandBuffer,
5263  static_cast<VkBuffer>( dstBuffer ),
5264  static_cast<VkDeviceSize>( dstOffset ),
5265  data.size() * sizeof( DataType ),
5266  reinterpret_cast<const void *>( data.data() ) );
5267  }
5268 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5269 
5270  template <typename Dispatch>
5271  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5274  uint32_t data,
5275  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5276  {
5277  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5278  d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
5279  }
5280 
5281  template <typename Dispatch>
5282  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
5284  const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
5285  uint32_t rangeCount,
5286  const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
5287  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5288  {
5289  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5290  d.vkCmdClearColorImage( m_commandBuffer,
5291  static_cast<VkImage>( image ),
5292  static_cast<VkImageLayout>( imageLayout ),
5293  reinterpret_cast<const VkClearColorValue *>( pColor ),
5294  rangeCount,
5295  reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
5296  }
5297 
5298 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5299  template <typename Dispatch>
5300  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
5302  const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
5303  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
5304  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5305  {
5306  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5307 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5308  VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" );
5309 # endif
5310 
5311  d.vkCmdClearColorImage( m_commandBuffer,
5312  static_cast<VkImage>( image ),
5313  static_cast<VkImageLayout>( imageLayout ),
5314  reinterpret_cast<const VkClearColorValue *>( &color ),
5315  ranges.size(),
5316  reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
5317  }
5318 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5319 
5320  template <typename Dispatch>
5321  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
5323  const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
5324  uint32_t rangeCount,
5325  const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
5326  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5327  {
5328  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5329  d.vkCmdClearDepthStencilImage( m_commandBuffer,
5330  static_cast<VkImage>( image ),
5331  static_cast<VkImageLayout>( imageLayout ),
5332  reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
5333  rangeCount,
5334  reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
5335  }
5336 
5337 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5338  template <typename Dispatch>
5339  VULKAN_HPP_INLINE void
5340  CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
5342  const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
5343  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
5344  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5345  {
5346  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5347 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5348  VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" );
5349 # endif
5350 
5351  d.vkCmdClearDepthStencilImage( m_commandBuffer,
5352  static_cast<VkImage>( image ),
5353  static_cast<VkImageLayout>( imageLayout ),
5354  reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
5355  ranges.size(),
5356  reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
5357  }
5358 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5359 
5360  template <typename Dispatch>
5361  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
5362  const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
5363  uint32_t rectCount,
5364  const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
5365  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5366  {
5367  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5368  d.vkCmdClearAttachments( m_commandBuffer,
5369  attachmentCount,
5370  reinterpret_cast<const VkClearAttachment *>( pAttachments ),
5371  rectCount,
5372  reinterpret_cast<const VkClearRect *>( pRects ) );
5373  }
5374 
5375 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5376  template <typename Dispatch>
5377  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
5378  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
5379  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5380  {
5381  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5382 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5383  VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" );
5384 # endif
5385 
5386  d.vkCmdClearAttachments( m_commandBuffer,
5387  attachments.size(),
5388  reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
5389  rects.size(),
5390  reinterpret_cast<const VkClearRect *>( rects.data() ) );
5391  }
5392 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5393 
5394  template <typename Dispatch>
5395  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5396  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5397  VULKAN_HPP_NAMESPACE::Image dstImage,
5398  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5399  uint32_t regionCount,
5400  const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
5401  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5402  {
5403  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5404  d.vkCmdResolveImage( m_commandBuffer,
5405  static_cast<VkImage>( srcImage ),
5406  static_cast<VkImageLayout>( srcImageLayout ),
5407  static_cast<VkImage>( dstImage ),
5408  static_cast<VkImageLayout>( dstImageLayout ),
5409  regionCount,
5410  reinterpret_cast<const VkImageResolve *>( pRegions ) );
5411  }
5412 
5413 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5414  template <typename Dispatch>
5415  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
5416  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
5417  VULKAN_HPP_NAMESPACE::Image dstImage,
5418  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
5419  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
5420  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5421  {
5422  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5423 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5424  VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" );
5425 # endif
5426 
5427  d.vkCmdResolveImage( m_commandBuffer,
5428  static_cast<VkImage>( srcImage ),
5429  static_cast<VkImageLayout>( srcImageLayout ),
5430  static_cast<VkImage>( dstImage ),
5431  static_cast<VkImageLayout>( dstImageLayout ),
5432  regions.size(),
5433  reinterpret_cast<const VkImageResolve *>( regions.data() ) );
5434  }
5435 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5436 
5437  template <typename Dispatch>
5438  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
5440  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5441  {
5442  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5443  d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
5444  }
5445 
5446  template <typename Dispatch>
5447  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
5449  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5450  {
5451  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5452  d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
5453  }
5454 
5455  template <typename Dispatch>
5456  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount,
5457  const VULKAN_HPP_NAMESPACE::Event * pEvents,
5460  uint32_t memoryBarrierCount,
5461  const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
5462  uint32_t bufferMemoryBarrierCount,
5463  const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
5464  uint32_t imageMemoryBarrierCount,
5465  const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
5466  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5467  {
5468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5469  d.vkCmdWaitEvents( m_commandBuffer,
5470  eventCount,
5471  reinterpret_cast<const VkEvent *>( pEvents ),
5472  static_cast<VkPipelineStageFlags>( srcStageMask ),
5473  static_cast<VkPipelineStageFlags>( dstStageMask ),
5474  memoryBarrierCount,
5475  reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
5476  bufferMemoryBarrierCount,
5477  reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
5478  imageMemoryBarrierCount,
5479  reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
5480  }
5481 
5482 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5483  template <typename Dispatch>
5484  VULKAN_HPP_INLINE void
5485  CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
5488  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
5489  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
5490  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
5491  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5492  {
5493  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5494 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5495  VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" );
5496 # endif
5497 
5498  d.vkCmdWaitEvents( m_commandBuffer,
5499  events.size(),
5500  reinterpret_cast<const VkEvent *>( events.data() ),
5501  static_cast<VkPipelineStageFlags>( srcStageMask ),
5502  static_cast<VkPipelineStageFlags>( dstStageMask ),
5503  memoryBarriers.size(),
5504  reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
5505  bufferMemoryBarriers.size(),
5506  reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
5507  imageMemoryBarriers.size(),
5508  reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
5509  }
5510 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5511 
5512  template <typename Dispatch>
5513  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
5515  VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
5516  uint32_t memoryBarrierCount,
5517  const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
5518  uint32_t bufferMemoryBarrierCount,
5519  const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
5520  uint32_t imageMemoryBarrierCount,
5521  const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
5522  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5523  {
5524  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5525  d.vkCmdPipelineBarrier( m_commandBuffer,
5526  static_cast<VkPipelineStageFlags>( srcStageMask ),
5527  static_cast<VkPipelineStageFlags>( dstStageMask ),
5528  static_cast<VkDependencyFlags>( dependencyFlags ),
5529  memoryBarrierCount,
5530  reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
5531  bufferMemoryBarrierCount,
5532  reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
5533  imageMemoryBarrierCount,
5534  reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
5535  }
5536 
5537 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5538  template <typename Dispatch>
5539  VULKAN_HPP_INLINE void
5540  CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
5542  VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
5543  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
5544  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
5545  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
5546  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5547  {
5548  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5550  VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" );
5551 # endif
5552 
5553  d.vkCmdPipelineBarrier( m_commandBuffer,
5554  static_cast<VkPipelineStageFlags>( srcStageMask ),
5555  static_cast<VkPipelineStageFlags>( dstStageMask ),
5556  static_cast<VkDependencyFlags>( dependencyFlags ),
5557  memoryBarriers.size(),
5558  reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
5559  bufferMemoryBarriers.size(),
5560  reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
5561  imageMemoryBarriers.size(),
5562  reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
5563  }
5564 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5565 
5566  template <typename Dispatch>
5567  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
5568  uint32_t query,
5570  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5571  {
5572  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5573  d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
5574  }
5575 
5576  template <typename Dispatch>
5577  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5578  {
5579  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5580  d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
5581  }
5582 
5583  template <typename Dispatch>
5584  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
5585  uint32_t firstQuery,
5586  uint32_t queryCount,
5587  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5588  {
5589  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5590  d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
5591  }
5592 
5593  template <typename Dispatch>
5594  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
5595  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
5596  uint32_t query,
5597  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5598  {
5599  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5600  d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
5601  }
5602 
5603  template <typename Dispatch>
5604  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
5605  uint32_t firstQuery,
5606  uint32_t queryCount,
5607  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5611  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5612  {
5613  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5614  d.vkCmdCopyQueryPoolResults( m_commandBuffer,
5615  static_cast<VkQueryPool>( queryPool ),
5616  firstQuery,
5617  queryCount,
5618  static_cast<VkBuffer>( dstBuffer ),
5619  static_cast<VkDeviceSize>( dstOffset ),
5620  static_cast<VkDeviceSize>( stride ),
5621  static_cast<VkQueryResultFlags>( flags ) );
5622  }
5623 
5624  template <typename Dispatch>
5625  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
5627  uint32_t offset,
5628  uint32_t size,
5629  const void * pValues,
5630  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5631  {
5632  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5633  d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
5634  }
5635 
5636 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5637  template <typename ValuesType, typename Dispatch>
5638  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
5640  uint32_t offset,
5641  VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,
5642  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5643  {
5644  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5645 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5646  VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" );
5647 # endif
5648 
5649  d.vkCmdPushConstants( m_commandBuffer,
5650  static_cast<VkPipelineLayout>( layout ),
5651  static_cast<VkShaderStageFlags>( stageFlags ),
5652  offset,
5653  values.size() * sizeof( ValuesType ),
5654  reinterpret_cast<const void *>( values.data() ) );
5655  }
5656 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5657 
5658  template <typename Dispatch>
5659  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
5661  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5662  {
5663  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5664  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5665  }
5666 
5667 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5668  template <typename Dispatch>
5669  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
5671  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5672  {
5673  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5674 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5675  VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" );
5676 # endif
5677 
5678  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5679  }
5680 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5681 
5682  template <typename Dispatch>
5683  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5684  {
5685  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5686  d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
5687  }
5688 
5689  template <typename Dispatch>
5690  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5691  {
5692  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5693  d.vkCmdEndRenderPass( m_commandBuffer );
5694  }
5695 
5696  template <typename Dispatch>
5697  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
5698  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
5699  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5700  {
5701  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5702  d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
5703  }
5704 
5705 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5706  template <typename Dispatch>
5707  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
5708  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5709  {
5710  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5711 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5712  VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" );
5713 # endif
5714 
5715  d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
5716  }
5717 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5718 
5719  //=== VK_VERSION_1_1 ===
5720 
5721  template <typename Dispatch>
5723  {
5724  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5725  return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
5726  }
5727 
5728 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5729  template <typename Dispatch>
5731  {
5732  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5733 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5734  VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires <VK_VERSION_1_1>" );
5735 # endif
5736 
5737  uint32_t apiVersion;
5738  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
5739  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
5740 
5741  return createResultValueType( result, apiVersion );
5742  }
5743 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5744 
5745  template <typename Dispatch>
5746  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount,
5747  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
5748  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5749  {
5750  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5751  return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
5752  }
5753 
5754 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5755  template <typename Dispatch>
5757  Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
5758  {
5759  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5761  VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
5762 # endif
5763 
5765  d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
5766  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
5767 
5768  return createResultValueType( result );
5769  }
5770 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5771 
5772  template <typename Dispatch>
5773  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount,
5774  const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
5775  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5776  {
5777  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5778  return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
5779  }
5780 
5781 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5782  template <typename Dispatch>
5784  Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
5785  {
5786  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5787 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5788  VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
5789 # endif
5790 
5792  d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
5793  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
5794 
5795  return createResultValueType( result );
5796  }
5797 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5798 
5799  template <typename Dispatch>
5800  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
5801  uint32_t localDeviceIndex,
5802  uint32_t remoteDeviceIndex,
5803  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
5804  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5805  {
5806  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5807  d.vkGetDeviceGroupPeerMemoryFeatures(
5808  m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
5809  }
5810 
5811 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5812  template <typename Dispatch>
5814  uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5815  {
5816  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5818  VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures &&
5819  "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
5820 # endif
5821 
5823  d.vkGetDeviceGroupPeerMemoryFeatures(
5824  m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
5825 
5826  return peerMemoryFeatures;
5827  }
5828 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5829 
5830  template <typename Dispatch>
5831  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5832  {
5833  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5834  d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
5835  }
5836 
5837  template <typename Dispatch>
5838  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
5839  uint32_t baseGroupY,
5840  uint32_t baseGroupZ,
5841  uint32_t groupCountX,
5842  uint32_t groupCountY,
5843  uint32_t groupCountZ,
5844  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5845  {
5846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5847  d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
5848  }
5849 
5850  template <typename Dispatch>
5852  Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount,
5853  VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
5854  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5855  {
5856  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5857  return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
5858  m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
5859  }
5860 
5861 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5862  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
5865  Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
5866  {
5867  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5868 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5869  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
5870  "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
5871 # endif
5872 
5873  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
5874  uint32_t physicalDeviceGroupCount;
5876  do
5877  {
5878  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
5879  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
5880  {
5881  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5882  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
5883  m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
5884  }
5885  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
5886  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
5887  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5888  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5889  {
5890  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5891  }
5892  return createResultValueType( result, physicalDeviceGroupProperties );
5893  }
5894 
5895  template <typename PhysicalDeviceGroupPropertiesAllocator,
5896  typename Dispatch,
5897  typename std::enable_if<
5898  std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
5899  int>::type>
5902  Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
5903  {
5904  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5905 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5906  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
5907  "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
5908 # endif
5909 
5910  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
5911  physicalDeviceGroupPropertiesAllocator );
5912  uint32_t physicalDeviceGroupCount;
5914  do
5915  {
5916  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
5917  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
5918  {
5919  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5920  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
5921  m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
5922  }
5923  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
5924  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
5925  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5926  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5927  {
5928  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5929  }
5930  return createResultValueType( result, physicalDeviceGroupProperties );
5931  }
5932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5933 
5934  template <typename Dispatch>
5935  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
5936  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
5937  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5938  {
5939  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5940  d.vkGetImageMemoryRequirements2(
5941  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5942  }
5943 
5944 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5945  template <typename Dispatch>
5946  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
5947  Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5948  {
5949  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5950 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5951  VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 &&
5952  "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
5953 # endif
5954 
5955  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
5956  d.vkGetImageMemoryRequirements2(
5957  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5958 
5959  return memoryRequirements;
5960  }
5961 
5962  template <typename X, typename Y, typename... Z, typename Dispatch>
5963  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
5964  Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5965  {
5966  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5967 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5968  VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 &&
5969  "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
5970 # endif
5971 
5972  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
5973  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
5974  d.vkGetImageMemoryRequirements2(
5975  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5976 
5977  return structureChain;
5978  }
5979 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5980 
5981  template <typename Dispatch>
5982  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
5983  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
5984  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5985  {
5986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5987  d.vkGetBufferMemoryRequirements2(
5988  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5989  }
5990 
5991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5992  template <typename Dispatch>
5993  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
5994  Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5995  {
5996  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5997 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
5998  VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 &&
5999  "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
6000 # endif
6001 
6002  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
6003  d.vkGetBufferMemoryRequirements2(
6004  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
6005 
6006  return memoryRequirements;
6007  }
6008 
6009  template <typename X, typename Y, typename... Z, typename Dispatch>
6010  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
6011  Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6012  {
6013  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6014 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6015  VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 &&
6016  "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
6017 # endif
6018 
6019  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
6020  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
6021  d.vkGetBufferMemoryRequirements2(
6022  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
6023 
6024  return structureChain;
6025  }
6026 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6027 
6028  template <typename Dispatch>
6029  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
6030  uint32_t * pSparseMemoryRequirementCount,
6031  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
6032  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6033  {
6034  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6035  d.vkGetImageSparseMemoryRequirements2( m_device,
6036  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
6037  pSparseMemoryRequirementCount,
6038  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
6039  }
6040 
6041 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6042  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
6043  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
6044  Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
6045  {
6046  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6047 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6048  VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 &&
6049  "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
6050 # endif
6051 
6052  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
6053  uint32_t sparseMemoryRequirementCount;
6054  d.vkGetImageSparseMemoryRequirements2(
6055  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
6056  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
6057  d.vkGetImageSparseMemoryRequirements2( m_device,
6058  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
6059  &sparseMemoryRequirementCount,
6060  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
6061 
6062  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
6063  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
6064  {
6065  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
6066  }
6067  return sparseMemoryRequirements;
6068  }
6069 
6070  template <typename SparseImageMemoryRequirements2Allocator,
6071  typename Dispatch,
6072  typename std::enable_if<
6073  std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
6074  int>::type>
6075  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
6076  Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
6077  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
6078  Dispatch const & d ) const
6079  {
6080  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6081 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6082  VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 &&
6083  "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
6084 # endif
6085 
6086  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
6087  sparseImageMemoryRequirements2Allocator );
6088  uint32_t sparseMemoryRequirementCount;
6089  d.vkGetImageSparseMemoryRequirements2(
6090  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
6091  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
6092  d.vkGetImageSparseMemoryRequirements2( m_device,
6093  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
6094  &sparseMemoryRequirementCount,
6095  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
6096 
6097  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
6098  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
6099  {
6100  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
6101  }
6102  return sparseMemoryRequirements;
6103  }
6104 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6105 
6106  template <typename Dispatch>
6107  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6108  {
6109  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6110  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
6111  }
6112 
6113 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6114  template <typename Dispatch>
6115  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
6116  PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6117  {
6118  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6120  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 &&
6121  "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6122 # endif
6123 
6124  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
6125  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
6126 
6127  return features;
6128  }
6129 
6130  template <typename X, typename Y, typename... Z, typename Dispatch>
6131  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
6132  PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6133  {
6134  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6135 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6136  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 &&
6137  "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6138 # endif
6139 
6140  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
6141  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
6142  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
6143 
6144  return structureChain;
6145  }
6146 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6147 
6148  template <typename Dispatch>
6149  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
6150  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6151  {
6152  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6153  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
6154  }
6155 
6156 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6157  template <typename Dispatch>
6158  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
6159  PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6160  {
6161  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6162 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6163  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 &&
6164  "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6165 # endif
6166 
6167  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
6168  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
6169 
6170  return properties;
6171  }
6172 
6173  template <typename X, typename Y, typename... Z, typename Dispatch>
6174  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
6175  PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6176  {
6177  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6178 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6179  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 &&
6180  "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6181 # endif
6182 
6183  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
6184  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
6185  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
6186 
6187  return structureChain;
6188  }
6189 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6190 
6191  template <typename Dispatch>
6192  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
6193  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
6194  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6195  {
6196  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6197  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
6198  }
6199 
6200 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6201  template <typename Dispatch>
6202  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
6203  PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6204  {
6205  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6206 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6207  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 &&
6208  "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6209 # endif
6210 
6211  VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
6212  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
6213 
6214  return formatProperties;
6215  }
6216 
6217  template <typename X, typename Y, typename... Z, typename Dispatch>
6218  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
6219  PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6220  {
6221  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6223  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 &&
6224  "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6225 # endif
6226 
6227  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
6228  VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
6229  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
6230 
6231  return structureChain;
6232  }
6233 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6234 
6235  template <typename Dispatch>
6237  PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
6238  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
6239  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6240  {
6241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6242  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
6243  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
6244  reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
6245  }
6246 
6247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6248  template <typename Dispatch>
6250  PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
6251  {
6252  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6253 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6254  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 &&
6255  "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6256 # endif
6257 
6258  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
6260  d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
6261  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
6262  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
6263  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
6264 
6265  return createResultValueType( result, imageFormatProperties );
6266  }
6267 
6268  template <typename X, typename Y, typename... Z, typename Dispatch>
6269  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
6270  PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
6271  {
6272  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6273 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6274  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 &&
6275  "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6276 # endif
6277 
6278  StructureChain<X, Y, Z...> structureChain;
6279  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
6281  d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
6282  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
6283  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
6284  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
6285 
6286  return createResultValueType( result, structureChain );
6287  }
6288 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6289 
6290  template <typename Dispatch>
6291  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
6292  VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
6293  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6294  {
6295  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6296  d.vkGetPhysicalDeviceQueueFamilyProperties2(
6297  m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
6298  }
6299 
6300 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6301  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
6302  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
6303  PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
6304  {
6305  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6306 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6307  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
6308  "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6309 # endif
6310 
6311  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
6312  uint32_t queueFamilyPropertyCount;
6313  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
6314  queueFamilyProperties.resize( queueFamilyPropertyCount );
6315  d.vkGetPhysicalDeviceQueueFamilyProperties2(
6316  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
6317 
6318  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
6319  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
6320  {
6321  queueFamilyProperties.resize( queueFamilyPropertyCount );
6322  }
6323  return queueFamilyProperties;
6324  }
6325 
6326  template <
6327  typename QueueFamilyProperties2Allocator,
6328  typename Dispatch,
6329  typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type>
6330  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
6331  PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
6332  {
6333  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6334 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6335  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
6336  "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6337 # endif
6338 
6339  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
6340  uint32_t queueFamilyPropertyCount;
6341  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
6342  queueFamilyProperties.resize( queueFamilyPropertyCount );
6343  d.vkGetPhysicalDeviceQueueFamilyProperties2(
6344  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
6345 
6346  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
6347  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
6348  {
6349  queueFamilyProperties.resize( queueFamilyPropertyCount );
6350  }
6351  return queueFamilyProperties;
6352  }
6353 
6354  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
6355  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
6356  PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
6357  {
6358  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6359 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6360  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
6361  "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6362 # endif
6363 
6364  std::vector<StructureChain, StructureChainAllocator> structureChains;
6365  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
6366  uint32_t queueFamilyPropertyCount;
6367  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
6368  structureChains.resize( queueFamilyPropertyCount );
6369  queueFamilyProperties.resize( queueFamilyPropertyCount );
6370  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
6371  {
6372  queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
6373  }
6374  d.vkGetPhysicalDeviceQueueFamilyProperties2(
6375  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
6376 
6377  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
6378  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
6379  {
6380  structureChains.resize( queueFamilyPropertyCount );
6381  }
6382  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
6383  {
6384  structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
6385  }
6386  return structureChains;
6387  }
6388 
6389  template <typename StructureChain,
6390  typename StructureChainAllocator,
6391  typename Dispatch,
6392  typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
6393  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
6394  PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
6395  {
6396  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6397 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6398  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 &&
6399  "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6400 # endif
6401 
6402  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
6403  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
6404  uint32_t queueFamilyPropertyCount;
6405  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
6406  structureChains.resize( queueFamilyPropertyCount );
6407  queueFamilyProperties.resize( queueFamilyPropertyCount );
6408  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
6409  {
6410  queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
6411  }
6412  d.vkGetPhysicalDeviceQueueFamilyProperties2(
6413  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
6414 
6415  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
6416  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
6417  {
6418  structureChains.resize( queueFamilyPropertyCount );
6419  }
6420  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
6421  {
6422  structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
6423  }
6424  return structureChains;
6425  }
6426 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6427 
6428  template <typename Dispatch>
6429  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
6430  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6431  {
6432  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6433  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
6434  }
6435 
6436 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6437  template <typename Dispatch>
6438  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
6439  PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6440  {
6441  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6442 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6443  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 &&
6444  "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6445 # endif
6446 
6447  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
6448  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
6449 
6450  return memoryProperties;
6451  }
6452 
6453  template <typename X, typename Y, typename... Z, typename Dispatch>
6454  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
6455  PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6456  {
6457  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6458 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6459  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 &&
6460  "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
6461 # endif
6462 
6463  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
6464  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
6465  structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
6466  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
6467 
6468  return structureChain;
6469  }
6470 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6471 
6472  template <typename Dispatch>
6473  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
6474  uint32_t * pPropertyCount,
6475  VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
6476  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6477  {
6478  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6479  d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
6480  reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
6481  pPropertyCount,
6482  reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
6483  }
6484 
6485 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: runs the two-call idiom (first call with nullptr to get the count,
  // second call to fill the vector) and returns the properties as a std::vector.
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t                                                                                               propertyCount;
    // First call: query only the number of available property entries.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    // Second call: fill the now correctly-sized vector.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    // Shrink in the (unexpected) case the second call reported fewer entries than the first.
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
6513 
  // Enhanced-mode overload taking a caller-supplied allocator for the returned vector; otherwise
  // identical to the overload above (two-call idiom, shrink on a smaller second count).
  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                     SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                     Dispatch const &                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
#  endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t                                                                                               propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
6547 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6548 
6549  template <typename Dispatch>
6550  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
6552  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6553  {
6554  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6555  d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
6556  }
6557 
  // Raw wrapper over vkGetDeviceQueue2: writes the requested queue handle through pQueue.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
                                            VULKAN_HPP_NAMESPACE::Queue *                  pQueue,
                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  }
6566 
6567 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queue handle by value instead of via an out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function <vkGetDeviceQueue2> requires <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );

    return queue;
  }
6582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6583 
6584  template <typename Dispatch>
6586  Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
6587  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6588  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
6589  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6590  {
6591  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6592  return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
6593  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
6594  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6595  reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
6596  }
6597 
6598 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6599  template <typename Dispatch>
6601  Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
6603  Dispatch const & d ) const
6604  {
6605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6606 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6607  VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
6608  "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
6609 # endif
6610 
6611  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
6612  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
6613  m_device,
6614  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
6615  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6616  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
6617  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
6618 
6619  return createResultValueType( result, ycbcrConversion );
6620  }
6621 
6622 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6623  template <typename Dispatch>
6625  Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
6627  Dispatch const & d ) const
6628  {
6629  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6631  VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
6632  "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
6633 # endif
6634 
6635  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
6636  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
6637  m_device,
6638  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
6639  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6640  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
6641  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
6642 
6643  return createResultValueType(
6645  }
6646 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
6647 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6648 
  // Raw wrapper over vkDestroySamplerYcbcrConversion with explicit allocation-callback pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
6658 
6659 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6660  template <typename Dispatch>
6661  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
6663  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6664  {
6665  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6666 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6667  VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion &&
6668  "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
6669 # endif
6670 
6671  d.vkDestroySamplerYcbcrConversion(
6672  m_device,
6673  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6674  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6675  }
6676 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6677 
  // Generic destroy() overload for SamplerYcbcrConversion; forwards to vkDestroySamplerYcbcrConversion.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
6687 
6688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6689  template <typename Dispatch>
6690  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
6692  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6693  {
6694  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6695 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6696  VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion &&
6697  "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
6698 # endif
6699 
6700  d.vkDestroySamplerYcbcrConversion(
6701  m_device,
6702  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6703  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6704  }
6705 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6706 
6707  template <typename Dispatch>
6709  Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
6710  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6711  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
6712  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6713  {
6714  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6715  return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
6716  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
6717  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6718  reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
6719  }
6720 
6721 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6722  template <typename Dispatch>
6724  Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
6726  Dispatch const & d ) const
6727  {
6728  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6729 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6730  VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate &&
6731  "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
6732 # endif
6733 
6734  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
6735  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate(
6736  m_device,
6737  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6738  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6739  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
6740  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
6741 
6742  return createResultValueType( result, descriptorUpdateTemplate );
6743  }
6744 
6745 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6746  template <typename Dispatch>
6748  Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
6750  Dispatch const & d ) const
6751  {
6752  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6753 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6754  VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate &&
6755  "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
6756 # endif
6757 
6758  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
6759  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate(
6760  m_device,
6761  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6762  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6763  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
6764  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
6765 
6766  return createResultValueType( result,
6768  descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
6769  }
6770 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
6771 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6772 
  // Raw wrapper over vkDestroyDescriptorUpdateTemplate with explicit allocation-callback pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
6782 
6783 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6784  template <typename Dispatch>
6785  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
6787  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6788  {
6789  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6791  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate &&
6792  "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
6793 # endif
6794 
6795  d.vkDestroyDescriptorUpdateTemplate(
6796  m_device,
6797  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6798  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6799  }
6800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6801 
  // Generic destroy() overload for DescriptorUpdateTemplate; forwards to vkDestroyDescriptorUpdateTemplate.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
6811 
6812 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6813  template <typename Dispatch>
6814  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
6816  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6817  {
6818  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6819 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
6820  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate &&
6821  "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
6822 # endif
6823 
6824  d.vkDestroyDescriptorUpdateTemplate(
6825  m_device,
6826  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6827  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6828  }
6829 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6830 
  // Raw wrapper over vkUpdateDescriptorSetWithTemplate; pData is passed through untyped.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                                                  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                  const void *                                   pData,
                                                                  Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUpdateDescriptorSetWithTemplate(
      m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  }
6841 
6842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes a typed data reference and passes its address as the untyped
  // payload expected by vkUpdateDescriptorSetWithTemplate.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                                                  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                  DataType const &                               data,
                                                                  Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate &&
                       "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
#  endif

    d.vkUpdateDescriptorSetWithTemplate( m_device,
                                         static_cast<VkDescriptorSet>( descriptorSet ),
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const void *>( &data ) );
  }
6860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6861 
  // Raw wrapper over vkGetPhysicalDeviceExternalBufferProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                                      VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
                                                   reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
6872 
6873 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queried ExternalBufferProperties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
    PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
                                                 Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties &&
                       "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
                                                   reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );

    return externalBufferProperties;
  }
6892 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6893 
  // Raw wrapper over vkGetPhysicalDeviceExternalFenceProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                                     VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
                                                                     Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
                                                  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
                                                  reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }
6904 
6905 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queried ExternalFenceProperties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
    PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties &&
                       "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
                                                  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
                                                  reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );

    return externalFenceProperties;
  }
6924 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6925 
  // Raw wrapper over vkGetPhysicalDeviceExternalSemaphoreProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
                                                    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
                                                    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
                                                      reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
6937 
6938 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queried ExternalSemaphoreProperties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
    PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
                                                    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties &&
                       "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
                                                      reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );

    return externalSemaphoreProperties;
  }
6957 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6958 
  // Raw wrapper over vkGetDescriptorSetLayoutSupport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                                                VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                                                Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }
6968 
6969 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the queried DescriptorSetLayoutSupport by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );

    return support;
  }
6986 
  // StructureChain overload: fills the DescriptorSetLayoutSupport link of a caller-specified
  // structure chain, so pNext-extension structures in the chain are populated as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // Write directly into the chain's DescriptorSetLayoutSupport element; its pNext links stay intact.
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );

    return structureChain;
  }
7004 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7005 
7006  //=== VK_VERSION_1_2 ===
7007 
7008  template <typename Dispatch>
7009  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
7011  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
7012  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
7013  uint32_t maxDrawCount,
7014  uint32_t stride,
7015  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7016  {
7017  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7018  d.vkCmdDrawIndirectCount( m_commandBuffer,
7019  static_cast<VkBuffer>( buffer ),
7020  static_cast<VkDeviceSize>( offset ),
7021  static_cast<VkBuffer>( countBuffer ),
7022  static_cast<VkDeviceSize>( countBufferOffset ),
7023  maxDrawCount,
7024  stride );
7025  }
7026 
7027  template <typename Dispatch>
7028  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
7030  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
7031  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
7032  uint32_t maxDrawCount,
7033  uint32_t stride,
7034  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7035  {
7036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7037  d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
7038  static_cast<VkBuffer>( buffer ),
7039  static_cast<VkDeviceSize>( offset ),
7040  static_cast<VkBuffer>( countBuffer ),
7041  static_cast<VkDeviceSize>( countBufferOffset ),
7042  maxDrawCount,
7043  stride );
7044  }
7045 
  // Raw wrapper over vkCreateRenderPass2; returns the C result cast to Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                                                           Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRenderPass2( m_device,
                                                       reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }
7058 
7059 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7060  template <typename Dispatch>
7062  Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
7064  Dispatch const & d ) const
7065  {
7066  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7068  VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
7069 # endif
7070 
7071  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
7073  d.vkCreateRenderPass2( m_device,
7074  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
7075  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7076  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
7077  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
7078 
7079  return createResultValueType( result, renderPass );
7080  }
7081 
7082 # ifndef VULKAN_HPP_NO_SMART_HANDLE
7083  template <typename Dispatch>
7085  Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
7087  Dispatch const & d ) const
7088  {
7089  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7090 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7091  VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
7092 # endif
7093 
7094  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
7096  d.vkCreateRenderPass2( m_device,
7097  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
7098  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7099  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
7100  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
7101 
7102  return createResultValueType(
7103  result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
7104  }
7105 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
7106 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7107 
  // Raw wrapper over vkCmdBeginRenderPass2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                                          const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }
7117 
7118 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7119  template <typename Dispatch>
7120  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
7121  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
7122  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7123  {
7124  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7125 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7126  VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
7127 # endif
7128 
7129  d.vkCmdBeginRenderPass2(
7130  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
7131  }
7132 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7133 
7134  template <typename Dispatch>
7135  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
7136  const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
7137  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7138  {
7139  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7140  d.vkCmdNextSubpass2(
7141  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
7142  }
7143 
7144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7145  template <typename Dispatch>
7146  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
7147  const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
7148  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7149  {
7150  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7151 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7152  VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
7153 # endif
7154 
7155  d.vkCmdNextSubpass2(
7156  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
7157  }
7158 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7159 
7160  template <typename Dispatch>
7161  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
7162  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7163  {
7164  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7165  d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
7166  }
7167 
7168 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7169  template <typename Dispatch>
7170  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
7171  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7172  {
7173  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7174 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7175  VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
7176 # endif
7177 
7178  d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
7179  }
7180 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7181 
7182  template <typename Dispatch>
7183  VULKAN_HPP_INLINE void
7184  Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7185  {
7186  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7187  d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
7188  }
7189 
7190  template <typename Dispatch>
7191  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
7192  uint64_t * pValue,
7193  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7194  {
7195  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7196  return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
7197  }
7198 
7199 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7200  template <typename Dispatch>
7201  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
7202  Dispatch const & d ) const
7203  {
7204  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7205 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7206  VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
7207 # endif
7208 
7209  uint64_t value;
7211  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
7212  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
7213 
7214  return createResultValueType( result, value );
7215  }
7216 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7217 
7218  template <typename Dispatch>
7219  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
7220  uint64_t timeout,
7221  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7222  {
7223  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7224  return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
7225  }
7226 
7227 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7228  template <typename Dispatch>
7230  Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
7231  {
7232  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7233 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7234  VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
7235 # endif
7236 
7238  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
7239  resultCheck(
7240  result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
7241 
7242  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
7243  }
7244 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7245 
7246  template <typename Dispatch>
7247  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
7248  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7249  {
7250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7251  return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
7252  }
7253 
7254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7255  template <typename Dispatch>
7257  Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
7258  {
7259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7260 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7261  VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
7262 # endif
7263 
7265  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
7266  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
7267 
7268  return createResultValueType( result );
7269  }
7270 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7271 
7272  template <typename Dispatch>
7273  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
7274  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7275  {
7276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7277  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
7278  }
7279 
7280 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7281  template <typename Dispatch>
7282  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
7283  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7284  {
7285  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7286 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7287  VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress &&
7288  "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
7289 # endif
7290 
7291  VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
7292 
7293  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
7294  }
7295 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7296 
7297  template <typename Dispatch>
7298  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
7299  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7300  {
7301  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7302  return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
7303  }
7304 
7305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7306  template <typename Dispatch>
7307  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
7308  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7309  {
7310  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7311 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7312  VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress &&
7313  "Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
7314 # endif
7315 
7316  uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
7317 
7318  return result;
7319  }
7320 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7321 
7322  template <typename Dispatch>
7323  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
7324  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7325  {
7326  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7327  return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
7328  }
7329 
7330 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7331  template <typename Dispatch>
7332  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
7333  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7334  {
7335  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7336 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7337  VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress &&
7338  "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
7339 # endif
7340 
7341  uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
7342 
7343  return result;
7344  }
7345 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7346 
7347  //=== VK_VERSION_1_3 ===
7348 
7349  template <typename Dispatch>
7350  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount,
7351  VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
7352  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7353  {
7354  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7355  return static_cast<Result>(
7356  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
7357  }
7358 
7359 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7360  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
7363  PhysicalDevice::getToolProperties( Dispatch const & d ) const
7364  {
7365  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7367  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties &&
7368  "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
7369 # endif
7370 
7371  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
7372  uint32_t toolCount;
7374  do
7375  {
7376  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) );
7377  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
7378  {
7379  toolProperties.resize( toolCount );
7380  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
7381  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
7382  }
7383  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
7384  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
7385  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
7386  if ( toolCount < toolProperties.size() )
7387  {
7388  toolProperties.resize( toolCount );
7389  }
7390  return createResultValueType( result, toolProperties );
7391  }
7392 
7393  template <
7394  typename PhysicalDeviceToolPropertiesAllocator,
7395  typename Dispatch,
7396  typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
7397  int>::type>
7400  PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
7401  {
7402  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7403 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7404  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties &&
7405  "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
7406 # endif
7407 
7408  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
7409  physicalDeviceToolPropertiesAllocator );
7410  uint32_t toolCount;
7412  do
7413  {
7414  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) );
7415  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
7416  {
7417  toolProperties.resize( toolCount );
7418  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
7419  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
7420  }
7421  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
7422  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
7423  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
7424  if ( toolCount < toolProperties.size() )
7425  {
7426  toolProperties.resize( toolCount );
7427  }
7428  return createResultValueType( result, toolProperties );
7429  }
7430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7431 
7432  template <typename Dispatch>
7433  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
7434  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7435  VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
7436  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7437  {
7438  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7439  return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
7440  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
7441  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
7442  reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
7443  }
7444 
7445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7446  template <typename Dispatch>
7448  Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
7450  Dispatch const & d ) const
7451  {
7452  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7453 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7454  VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7455 # endif
7456 
7457  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
7459  d.vkCreatePrivateDataSlot( m_device,
7460  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
7461  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7462  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
7463  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
7464 
7465  return createResultValueType( result, privateDataSlot );
7466  }
7467 
7468 # ifndef VULKAN_HPP_NO_SMART_HANDLE
7469  template <typename Dispatch>
7471  Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
7473  Dispatch const & d ) const
7474  {
7475  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7476 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7477  VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7478 # endif
7479 
7480  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
7482  d.vkCreatePrivateDataSlot( m_device,
7483  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
7484  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7485  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
7486  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
7487 
7488  return createResultValueType(
7489  result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
7490  }
7491 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
7492 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7493 
7494  template <typename Dispatch>
7495  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7496  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7497  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7498  {
7499  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7500  d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7501  }
7502 
7503 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7504  template <typename Dispatch>
7505  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7507  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7508  {
7509  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7510 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7511  VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7512 # endif
7513 
7514  d.vkDestroyPrivateDataSlot(
7515  m_device,
7516  static_cast<VkPrivateDataSlot>( privateDataSlot ),
7517  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7518  }
7519 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7520 
7521  template <typename Dispatch>
7522  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7523  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7524  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7525  {
7526  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7527  d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7528  }
7529 
7530 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7531  template <typename Dispatch>
7532  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7534  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7535  {
7536  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7537 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7538  VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7539 # endif
7540 
7541  d.vkDestroyPrivateDataSlot(
7542  m_device,
7543  static_cast<VkPrivateDataSlot>( privateDataSlot ),
7544  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7545  }
7546 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7547 
7548 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
7549  template <typename Dispatch>
7551  uint64_t objectHandle,
7552  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7553  uint64_t data,
7554  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7555  {
7556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7557  return static_cast<Result>(
7558  d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
7559  }
7560 #else
7561  template <typename Dispatch>
7563  uint64_t objectHandle,
7564  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7565  uint64_t data,
7566  Dispatch const & d ) const
7567  {
7568  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7570  VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7571 # endif
7572 
7574  d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
7575  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
7576 
7577  return createResultValueType( result );
7578  }
7579 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
7580 
7581  template <typename Dispatch>
7582  VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
7583  uint64_t objectHandle,
7584  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7585  uint64_t * pData,
7586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7587  {
7588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7589  d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
7590  }
7591 
7592 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7593  template <typename Dispatch>
7594  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
7595  uint64_t objectHandle,
7596  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
7597  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7598  {
7599  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7600 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7601  VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
7602 # endif
7603 
7604  uint64_t data;
7605  d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
7606 
7607  return data;
7608  }
7609 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7610 
7611  template <typename Dispatch>
7612  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
7613  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
7614  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7615  {
7616  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7617  d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
7618  }
7619 
7620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7621  template <typename Dispatch>
7622  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
7623  const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
7624  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7625  {
7626  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7627 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7628  VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
7629 # endif
7630 
7631  d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
7632  }
7633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7634 
7635  template <typename Dispatch>
7636  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
7638  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7639  {
7640  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7641  d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
7642  }
7643 
7644  template <typename Dispatch>
7645  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount,
7646  const VULKAN_HPP_NAMESPACE::Event * pEvents,
7647  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
7648  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7649  {
7650  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7651  d.vkCmdWaitEvents2(
7652  m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
7653  }
7654 
7655 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7656  template <typename Dispatch>
7657  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
7658  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
7659  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
7660  {
7661  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7662 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7663  VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
7664 # endif
7665 # ifdef VULKAN_HPP_NO_EXCEPTIONS
7666  VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
7667 # else
7668  if ( events.size() != dependencyInfos.size() )
7669  {
7670  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
7671  }
7672 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
7673 
7674  d.vkCmdWaitEvents2( m_commandBuffer,
7675  events.size(),
7676  reinterpret_cast<const VkEvent *>( events.data() ),
7677  reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
7678  }
7679 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7680 
7681  template <typename Dispatch>
7682  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
7683  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7684  {
7685  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7686  d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
7687  }
7688 
7689 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7690  template <typename Dispatch>
7691  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
7692  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7693  {
7694  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7695 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7696  VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
7697 # endif
7698 
7699  d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
7700  }
7701 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7702 
7703  template <typename Dispatch>
7704  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
7705  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
7706  uint32_t query,
7707  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7708  {
7709  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7710  d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
7711  }
7712 
7713  template <typename Dispatch>
7714  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount,
7715  const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
7716  VULKAN_HPP_NAMESPACE::Fence fence,
7717  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7718  {
7719  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7720  return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
7721  }
7722 
7723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7724  template <typename Dispatch>
7726  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
7727  {
7728  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7729 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
7730  VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
7731 # endif
7732 
7734  d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
7735  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
7736 
7737  return createResultValueType( result );
7738  }
7739 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7740 
  // C-style overload: thin cast-and-forward to vkCmdCopyBuffer2; caller guarantees pCopyBufferInfo validity.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
  }
7748 
7749 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the copy-info by reference; with the dynamic dispatcher, asserts the
  // entry point was actually loaded before use.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  }
7761 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7762 
  // C-style overload: thin cast-and-forward to vkCmdCopyImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
  }
7769 
7770 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the copy-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function <vkCmdCopyImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  }
7781 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7782 
  // C-style overload: thin cast-and-forward to vkCmdCopyBufferToImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
  }
7790 
7791 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the copy-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  }
7803 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7804 
  // C-style overload: thin cast-and-forward to vkCmdCopyImageToBuffer2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
  }
7812 
7813 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the copy-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  }
7825 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7826 
  // C-style overload: thin cast-and-forward to vkCmdBlitImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
  }
7833 
7834 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the blit-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  }
7845 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7846 
  // C-style overload: thin cast-and-forward to vkCmdResolveImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
  }
7854 
7855 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the resolve-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  }
7867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7868 
  // C-style overload: thin cast-and-forward to vkCmdBeginRendering (dynamic rendering, no render pass object).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
  }
7876 
7877 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking the rendering-info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  }
7889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7890 
  // Ends a dynamic rendering instance begun with beginRendering.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRendering( m_commandBuffer );
  }
7897 
  // Dynamic state: sets the cull mode (VK_DYNAMIC_STATE_CULL_MODE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }
7904 
  // Dynamic state: sets the front-face winding order (VK_DYNAMIC_STATE_FRONT_FACE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }
7911 
  // Dynamic state: sets the primitive topology (VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }
7919 
  // C-style overload: sets viewport count and viewports in one dynamic-state call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount,
                                                              const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }
7928 
7929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the viewport count is derived from the ArrayProxy's size.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount &&
                       "Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
7942 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7943 
  // C-style overload: sets scissor count and rectangles in one dynamic-state call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }
7951 
7952 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the scissor count is derived from the ArrayProxy's size.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount &&
                       "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# endif

    d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
7965 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7966 
  // C-style overload of vkCmdBindVertexBuffers2: binds bindingCount vertex buffers with per-binding
  // offsets and (optionally null) sizes/strides arrays; all pointers are forwarded unchecked.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
                                                            uint32_t bindingCount,
                                                            const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers2( m_commandBuffer,
                               firstBinding,
                               bindingCount,
                               reinterpret_cast<const VkBuffer *>( pBuffers ),
                               reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                               reinterpret_cast<const VkDeviceSize *>( pSizes ),
                               reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }
7985 
7986 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: validates that offsets matches buffers in length, and that sizes/strides are
  // either empty (interpreted as "not provided"; .data() then yields nullptr) or the same length as
  // buffers. With exceptions disabled the checks become asserts, otherwise a LogicError is thrown --
  // hence VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS on the signature.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 &&
                       "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers2( m_commandBuffer,
                               firstBinding,
                               buffers.size(),
                               reinterpret_cast<const VkBuffer *>( buffers.data() ),
                               reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                               reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                               reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
8027 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8028 
  // Dynamic state: enables/disables the depth test (VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }
8035 
  // Dynamic state: enables/disables depth writes (VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }
8042 
  // Dynamic state: sets the depth comparison operator (VK_DYNAMIC_STATE_DEPTH_COMPARE_OP).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }
8049 
  // Dynamic state: enables/disables the depth-bounds test (VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }
8057 
  // Dynamic state: enables/disables the stencil test (VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }
8064 
8065  template <typename Dispatch>
8066  VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
8069  VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
8071  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8072  {
8073  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8074  d.vkCmdSetStencilOp( m_commandBuffer,
8075  static_cast<VkStencilFaceFlags>( faceMask ),
8076  static_cast<VkStencilOp>( failOp ),
8077  static_cast<VkStencilOp>( passOp ),
8078  static_cast<VkStencilOp>( depthFailOp ),
8079  static_cast<VkCompareOp>( compareOp ) );
8080  }
8081 
  // Dynamic state: enables/disables rasterizer discard (VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }
8089 
  // Dynamic state: enables/disables depth bias (VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }
8096 
  // Dynamic state: enables/disables primitive restart (VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }
8104 
  // C-style overload: queries buffer memory requirements from a creation-info description
  // (no VkBuffer needs to exist); writes into *pMemoryRequirements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceBufferMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
8114 
8115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements &&
                       "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceBufferMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }
8132 
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-specified chain so
  // extension structures (via pNext) can be queried in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements &&
                       "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
8150 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8151 
  // C-style overload: queries image memory requirements from a creation-info description
  // (no VkImage needs to exist); writes into *pMemoryRequirements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                             VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
8161 
8162 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements &&
                       "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceImageMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }
8179 
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-specified chain so
  // extension structures (via pNext) can be queried in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements &&
                       "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
8197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8198 
  // C-style overload following the standard Vulkan enumerate pattern: with pSparseMemoryRequirements
  // == nullptr only the count is written; otherwise up to *pSparseMemoryRequirementCount entries are filled.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                   uint32_t * pSparseMemoryRequirementCount,
                                                                   VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSparseMemoryRequirements( m_device,
                                                reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
                                                pSparseMemoryRequirementCount,
                                                reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }
8211 
8212 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumerate pattern -- first call gets the count, second fills the
  // vector; a final shrink handles the count decreasing between calls.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements &&
                       "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    // First call: query the number of entries only (null output array).
    d.vkGetDeviceImageSparseMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetDeviceImageSparseMemoryRequirements( m_device,
                                                reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                &sparseMemoryRequirementCount,
                                                reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
8240 
  // Allocator-aware variant of the enhanced overload: identical two-call enumerate pattern, but the
  // result vector is constructed with the caller-provided allocator. The enable_if constraint
  // restricts the allocator's value_type to SparseImageMemoryRequirements2.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
                                              SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements &&
                       "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    // First call: query the number of entries only (null output array).
    d.vkGetDeviceImageSparseMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetDeviceImageSparseMemoryRequirements( m_device,
                                                reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                &sparseMemoryRequirementCount,
                                                reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
8275 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8276 
8277  //=== VK_KHR_surface ===
8278 
  // C-style overload: destroys the surface; pAllocator may be null for default allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
8287 
8288 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8289  template <typename Dispatch>
8290  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8292  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8293  {
8294  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8295 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8296  VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" );
8297 # endif
8298 
8299  d.vkDestroySurfaceKHR( m_instance,
8300  static_cast<VkSurfaceKHR>( surface ),
8301  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
8302  }
8303 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8304 
  // Generic-destroy alias for SurfaceKHR: identical to destroySurfaceKHR (C-style overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
8313 
8314 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8315  template <typename Dispatch>
8316  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8318  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8319  {
8320  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8321 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8322  VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" );
8323 # endif
8324 
8325  d.vkDestroySurfaceKHR( m_instance,
8326  static_cast<VkSurfaceKHR>( surface ),
8327  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
8328  }
8329 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8330 
  // C-style overload: writes whether the queue family can present to the surface into *pSupported
  // and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
                                                                                      VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                      VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
      m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
  }
8341 
// Enhanced-mode overload: returns the presentation-support Bool32 by value, with the Result
// checked by resultCheck (throws/asserts on failure per library configuration).
// NOTE(review): listing line 8344 (the function's return-type line, presumably
// `typename ResultValueType<Bool32>::type`) is missing from this extraction — confirm.
8342 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8343  template <typename Dispatch>
8345  PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8346  {
8347  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8348 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8349  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" );
8350 # endif
8351 
8352  VULKAN_HPP_NAMESPACE::Bool32 supported;
8353  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
8354  m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) );
8355  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
8356 
8357  return createResultValueType( result, supported );
8358  }
8359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8360 
// C-style overload: fills `pSurfaceCapabilities` with the surface's capabilities and returns the
// raw Result unchanged.
8361  template <typename Dispatch>
8362  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8363  VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
8364  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8365  {
8366  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8367  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
8368  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
8369  }
8370 
// Enhanced-mode overload: returns SurfaceCapabilitiesKHR by value; failure is routed through
// resultCheck rather than a returned error code.
// NOTE(review): listing line 8373 (the return-type line) is missing from this extraction — confirm.
8371 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8372  template <typename Dispatch>
8374  PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8375  {
8376  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8378  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" );
8379 # endif
8380 
8381  VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
8382  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
8383  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
8384  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
8385 
8386  return createResultValueType( result, surfaceCapabilities );
8387  }
8388 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8389 
// C-style overload of the two-call enumeration pattern: with `pSurfaceFormats == nullptr` the count
// is written to `*pSurfaceFormatCount`; otherwise up to that many formats are written out.
8390  template <typename Dispatch>
8391  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8392  uint32_t * pSurfaceFormatCount,
8393  VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
8394  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8395  {
8396  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8397  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
8398  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
8399  }
8400 
// Enhanced-mode overload: runs the count/fetch loop, retrying while the driver reports
// eIncomplete (the format set can change between the two calls), then shrinks the vector if the
// final count came back smaller than the allocation.
// NOTE(review): listing lines 8403 (return-type line) and 8413 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration used below) are missing from this
// extraction — confirm against the generated header.
8401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8402  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
8404  PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8405  {
8406  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8407 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8408  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
8409 # endif
8410 
8411  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
8412  uint32_t surfaceFormatCount;
8414  do
8415  {
      // First call: query the element count only.
8416  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8417  d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
8418  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
8419  {
      // Second call: fetch that many elements; may yield eIncomplete and loop again.
8420  surfaceFormats.resize( surfaceFormatCount );
8421  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
8422  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
8423  }
8424  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8425  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
8426  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
8427  if ( surfaceFormatCount < surfaceFormats.size() )
8428  {
8429  surfaceFormats.resize( surfaceFormatCount );
8430  }
8431  return createResultValueType( result, surfaceFormats );
8432  }
8433 
// Allocator-aware variant of the enhanced overload above: identical enumeration loop, but the
// result vector is constructed from a caller-supplied allocator. SFINAE-constrained so the
// allocator's value_type must be SurfaceFormatKHR.
// NOTE(review): listing lines 8437 (return-type line) and 8449 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8434  template <typename SurfaceFormatKHRAllocator,
8435  typename Dispatch,
8436  typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type>
8438  PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8439  SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
8440  Dispatch const & d ) const
8441  {
8442  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8443 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8444  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
8445 # endif
8446 
8447  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
8448  uint32_t surfaceFormatCount;
8450  do
8451  {
8452  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8453  d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
8454  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
8455  {
8456  surfaceFormats.resize( surfaceFormatCount );
8457  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
8458  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
8459  }
8460  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8461  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
8462  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
8463  if ( surfaceFormatCount < surfaceFormats.size() )
8464  {
8465  surfaceFormats.resize( surfaceFormatCount );
8466  }
8467  return createResultValueType( result, surfaceFormats );
8468  }
8469 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8470 
// C-style overload of the two-call enumeration for present modes; raw Result is returned unchanged.
8471  template <typename Dispatch>
8472  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8473  uint32_t * pPresentModeCount,
8474  VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
8475  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8476  {
8477  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8478  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
8479  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
8480  }
8481 
// Enhanced-mode overload: standard count/fetch loop, retried while eIncomplete, vector trimmed to
// the final count before being returned.
// NOTE(review): listing lines 8484 (return-type line) and 8494 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8482 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8483  template <typename PresentModeKHRAllocator, typename Dispatch>
8485  PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8486  {
8487  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8488 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8489  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" );
8490 # endif
8491 
8492  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
8493  uint32_t presentModeCount;
8495  do
8496  {
8497  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8498  d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
8499  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
8500  {
8501  presentModes.resize( presentModeCount );
8502  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
8503  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
8504  }
8505  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8506  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
8507  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
8508  if ( presentModeCount < presentModes.size() )
8509  {
8510  presentModes.resize( presentModeCount );
8511  }
8512  return createResultValueType( result, presentModes );
8513  }
8514 
// Allocator-aware variant: identical enumeration loop, result vector constructed from the
// caller-supplied allocator; SFINAE-constrained to allocators of PresentModeKHR.
// NOTE(review): listing lines 8518 (return-type line) and 8530 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8515  template <typename PresentModeKHRAllocator,
8516  typename Dispatch,
8517  typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type>
8519  PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8520  PresentModeKHRAllocator & presentModeKHRAllocator,
8521  Dispatch const & d ) const
8522  {
8523  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8524 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8525  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" );
8526 # endif
8527 
8528  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
8529  uint32_t presentModeCount;
8531  do
8532  {
8533  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8534  d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
8535  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
8536  {
8537  presentModes.resize( presentModeCount );
8538  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
8539  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
8540  }
8541  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8542  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
8543  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
8544  if ( presentModeCount < presentModes.size() )
8545  {
8546  presentModes.resize( presentModeCount );
8547  }
8548  return createResultValueType( result, presentModes );
8549  }
8550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8551 
8552  //=== VK_KHR_swapchain ===
8553 
// C-style overload: creates a swapchain; the handle is written through `pSwapchain`, the raw
// Result returned unchanged. Optional `pAllocator` may be null.
8554  template <typename Dispatch>
8555  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
8556  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8557  VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
8558  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8559  {
8560  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8561  return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
8562  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
8563  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8564  reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
8565  }
8566 
// Enhanced-mode overload: returns the created SwapchainKHR by value after resultCheck.
// NOTE(review): listing lines 8569/8571 (return-type line and the Optional<AllocationCallbacks>
// `allocator` parameter) and 8580 (the `VULKAN_HPP_NAMESPACE::Result result =` prefix of the call
// below) are missing from this extraction — confirm against the generated header.
8567 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8568  template <typename Dispatch>
8570  Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
8572  Dispatch const & d ) const
8573  {
8574  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8575 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8576  VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" );
8577 # endif
8578 
8579  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
8581  d.vkCreateSwapchainKHR( m_device,
8582  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8583  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8584  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
8585  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
8586 
8587  return createResultValueType( result, swapchain );
8588  }
8589 
// Smart-handle variant: same creation path as createSwapchainKHR, but wraps the handle in a
// UniqueHandle so it is destroyed automatically (RAII) when the returned object goes out of scope.
// NOTE(review): listing lines 8592/8594 (return-type and allocator-parameter lines), 8603 (the
// `VULKAN_HPP_NAMESPACE::Result result =` prefix) and 8611 (the UniqueHandle construction inside
// createResultValueType) are missing from this extraction — confirm against the generated header.
8590 # ifndef VULKAN_HPP_NO_SMART_HANDLE
8591  template <typename Dispatch>
8593  Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
8595  Dispatch const & d ) const
8596  {
8597  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8599  VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" );
8600 # endif
8601 
8602  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
8604  d.vkCreateSwapchainKHR( m_device,
8605  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8606  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8607  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
8608  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
8609 
8610  return createResultValueType(
8612  }
8613 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8614 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8615 
// C-style overload: destroys a swapchain with an optional (nullable) host allocator.
8616  template <typename Dispatch>
8617  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8618  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8619  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8620  {
8621  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8622  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
8623  }
8624 
// Enhanced-mode overload: destroys a swapchain, converting the Optional allocator wrapper to the
// raw callback pointer expected by the C API.
// NOTE(review): listing line 8628 (the Optional<AllocationCallbacks> `allocator` parameter
// declaration) is missing from this extraction — confirm.
8625 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8626  template <typename Dispatch>
8627  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8629  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8630  {
8631  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8632 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8633  VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" );
8634 # endif
8635 
8636  d.vkDestroySwapchainKHR( m_device,
8637  static_cast<VkSwapchainKHR>( swapchain ),
8638  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
8639  }
8640 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8641 
// Generic destroy() overload for SwapchainKHR — same body as destroySwapchainKHR; exists so
// UniqueHandle deleters and generic code can call a uniform `destroy` name.
8642  template <typename Dispatch>
8643  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8644  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8645  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8646  {
8647  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8648  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
8649  }
8650 
// Enhanced-mode generic destroy() overload for SwapchainKHR with an Optional allocator.
// NOTE(review): listing line 8654 (the Optional<AllocationCallbacks> `allocator` parameter
// declaration) is missing from this extraction — confirm.
8651 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8652  template <typename Dispatch>
8653  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8655  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8656  {
8657  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8658 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8659  VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" );
8660 # endif
8661 
8662  d.vkDestroySwapchainKHR( m_device,
8663  static_cast<VkSwapchainKHR>( swapchain ),
8664  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
8665  }
8666 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8667 
// C-style overload of the two-call enumeration for swapchain images; raw Result returned unchanged.
8668  template <typename Dispatch>
8669  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8670  uint32_t * pSwapchainImageCount,
8671  VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
8672  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8673  {
8674  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8675  return static_cast<Result>(
8676  d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
8677  }
8678 
// Enhanced-mode overload: count/fetch loop retried while eIncomplete; vector trimmed to the
// final image count before returning.
// NOTE(review): listing lines 8681 (return-type line) and 8691 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8680  template <typename ImageAllocator, typename Dispatch>
8682  Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
8683  {
8684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8685 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8686  VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" );
8687 # endif
8688 
8689  std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
8690  uint32_t swapchainImageCount;
8692  do
8693  {
8694  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8695  d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
8696  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount )
8697  {
8698  swapchainImages.resize( swapchainImageCount );
8699  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR(
8700  m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
8701  }
8702  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8703  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
8704  VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
8705  if ( swapchainImageCount < swapchainImages.size() )
8706  {
8707  swapchainImages.resize( swapchainImageCount );
8708  }
8709  return createResultValueType( result, swapchainImages );
8710  }
8711 
// Allocator-aware variant: identical enumeration loop, result vector constructed from the
// caller-supplied allocator; SFINAE-constrained to allocators of Image.
// NOTE(review): listing lines 8715 (return-type line) and 8725 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8712  template <typename ImageAllocator,
8713  typename Dispatch,
8714  typename std::enable_if<std::is_same<typename ImageAllocator::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type>
8716  Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
8717  {
8718  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8719 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8720  VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" );
8721 # endif
8722 
8723  std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
8724  uint32_t swapchainImageCount;
8726  do
8727  {
8728  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8729  d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
8730  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount )
8731  {
8732  swapchainImages.resize( swapchainImageCount );
8733  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR(
8734  m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
8735  }
8736  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8737  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
8738  VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
8739  if ( swapchainImageCount < swapchainImages.size() )
8740  {
8741  swapchainImages.resize( swapchainImageCount );
8742  }
8743  return createResultValueType( result, swapchainImages );
8744  }
8745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8746 
// C-style overload: acquires the next presentable image index into `*pImageIndex`, optionally
// signalling `semaphore` and/or `fence`; raw Result (eSuccess/eTimeout/eNotReady/eSuboptimalKHR/…)
// is returned unchanged for the caller to interpret.
8747  template <typename Dispatch>
8748  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8749  uint64_t timeout,
8750  VULKAN_HPP_NAMESPACE::Semaphore semaphore,
8751  VULKAN_HPP_NAMESPACE::Fence fence,
8752  uint32_t * pImageIndex,
8753  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8754  {
8755  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8756  return static_cast<Result>( d.vkAcquireNextImageKHR(
8757  m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
8758  }
8759 
// Enhanced-mode overload: returns ResultValue<uint32_t> (result + image index) because
// eTimeout/eNotReady/eSuboptimalKHR are legitimate outcomes the caller must inspect — they are
// passed to resultCheck as allowed success codes rather than thrown.
8760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8761  template <typename Dispatch>
8762  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
8763  uint64_t timeout,
8764  VULKAN_HPP_NAMESPACE::Semaphore semaphore,
8765  VULKAN_HPP_NAMESPACE::Fence fence,
8766  Dispatch const & d ) const
8767  {
8768  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8769 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8770  VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" );
8771 # endif
8772 
8773  uint32_t imageIndex;
8774  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireNextImageKHR(
8775  m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
8776  resultCheck( result,
8777  VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
8778  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
8779  VULKAN_HPP_NAMESPACE::Result::eTimeout,
8780  VULKAN_HPP_NAMESPACE::Result::eNotReady,
8781  VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
8782 
8783  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
8784  }
8785 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8786 
// C-style overload: queues a present operation; raw Result returned unchanged.
8787  template <typename Dispatch>
8788  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
8789  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8790  {
8791  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8792  return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
8793  }
8794 
// Enhanced-mode overload: returns the Result directly (not a value wrapper) because
// eSuboptimalKHR is an allowed success code the caller may want to react to.
// NOTE(review): listing line 8805 (presumably the `VULKAN_HPP_NAMESPACE::Result result =` prefix
// of the call below) is missing from this extraction — confirm.
8795 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8796  template <typename Dispatch>
8797  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
8798  Dispatch const & d ) const
8799  {
8800  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8801 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8802  VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" );
8803 # endif
8804 
8806  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
8807  resultCheck(
8808  result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
8809 
8810  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
8811  }
8812 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8813 
// C-style overload: fills `pDeviceGroupPresentCapabilities`; raw Result returned unchanged.
8814  template <typename Dispatch>
8815  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
8816  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8817  {
8818  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8819  return static_cast<Result>(
8820  d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
8821  }
8822 
// Enhanced-mode overload: returns DeviceGroupPresentCapabilitiesKHR by value after resultCheck.
// NOTE(review): listing lines 8825 (return-type line) and 8835 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result =` prefix of the call below) are missing from this
// extraction — confirm.
8823 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8824  template <typename Dispatch>
8826  Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
8827  {
8828  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8829 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8830  VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR &&
8831  "Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
8832 # endif
8833 
8834  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
8836  d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
8837  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
8838 
8839  return createResultValueType( result, deviceGroupPresentCapabilities );
8840  }
8841 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8842 
// C-style overload: queries the device-group present-mode flags supported for `surface`.
// NOTE(review): listing line 8845 (the `DeviceGroupPresentModeFlagsKHR * pModes` parameter
// declaration used below) is missing from this extraction — confirm.
8843  template <typename Dispatch>
8844  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8846  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8847  {
8848  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8849  return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
8850  m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
8851  }
8852 
// Enhanced-mode overload: returns the present-mode flags by value after resultCheck.
// NOTE(review): listing lines 8855 (return-type line) and 8864 (presumably the local
// `DeviceGroupPresentModeFlagsKHR modes;` declaration used below) are missing from this
// extraction — confirm.
8853 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8854  template <typename Dispatch>
8856  Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8857  {
8858  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8859 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8860  VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR &&
8861  "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
8862 # endif
8863 
8865  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
8866  m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
8867  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
8868 
8869  return createResultValueType( result, modes );
8870  }
8871 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8872 
// C-style overload of the two-call enumeration for present rectangles; raw Result returned unchanged.
8873  template <typename Dispatch>
8874  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
8875  uint32_t * pRectCount,
8876  VULKAN_HPP_NAMESPACE::Rect2D * pRects,
8877  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8878  {
8879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8880  return static_cast<Result>(
8881  d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
8882  }
8883 
// Enhanced-mode overload: count/fetch loop retried while eIncomplete; vector trimmed to the final
// count before returning.
// NOTE(review): listing lines 8886 (return-type line) and 8897 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8884 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8885  template <typename Rect2DAllocator, typename Dispatch>
8887  PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
8888  {
8889  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8890 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8891  VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR &&
8892  "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
8893 # endif
8894 
8895  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
8896  uint32_t rectCount;
8898  do
8899  {
8900  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8901  d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
8902  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount )
8903  {
8904  rects.resize( rectCount );
8905  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
8906  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) );
8907  }
8908  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8909  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
8910  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
8911  if ( rectCount < rects.size() )
8912  {
8913  rects.resize( rectCount );
8914  }
8915  return createResultValueType( result, rects );
8916  }
8917 
// Allocator-aware variant: identical enumeration loop, result vector constructed from the
// caller-supplied allocator; SFINAE-constrained to allocators of Rect2D.
// NOTE(review): listing lines 8921 (return-type line) and 8932 (presumably the
// `VULKAN_HPP_NAMESPACE::Result result;` declaration) are missing from this extraction — confirm.
8918  template <typename Rect2DAllocator,
8919  typename Dispatch,
8920  typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type>
8922  PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
8923  {
8924  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8925 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8926  VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR &&
8927  "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
8928 # endif
8929 
8930  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
8931  uint32_t rectCount;
8933  do
8934  {
8935  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
8936  d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
8937  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount )
8938  {
8939  rects.resize( rectCount );
8940  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
8941  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) );
8942  }
8943  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
8944  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
8945  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
8946  if ( rectCount < rects.size() )
8947  {
8948  rects.resize( rectCount );
8949  }
8950  return createResultValueType( result, rects );
8951  }
8952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8953 
// C-style overload: device-group-aware acquire — the struct carries swapchain/timeout/
// semaphore/fence; the image index is written through `pImageIndex`.
8954  template <typename Dispatch>
8955  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
8956  uint32_t * pImageIndex,
8957  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8958  {
8959  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8960  return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
8961  }
8962 
// Enhanced-mode overload: like acquireNextImageKHR, returns ResultValue<uint32_t> because
// eTimeout/eNotReady/eSuboptimalKHR are allowed success codes the caller must inspect.
// NOTE(review): listing line 8974 (presumably the `VULKAN_HPP_NAMESPACE::Result result =` prefix
// of the call below) is missing from this extraction — confirm.
8963 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8964  template <typename Dispatch>
8965  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
8966  Dispatch const & d ) const
8967  {
8968  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8969 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
8970  VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
8971 # endif
8972 
8973  uint32_t imageIndex;
8975  d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
8976  resultCheck( result,
8977  VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
8978  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
8979  VULKAN_HPP_NAMESPACE::Result::eTimeout,
8980  VULKAN_HPP_NAMESPACE::Result::eNotReady,
8981  VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
8982 
8983  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
8984  }
8985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8986 
8987  //=== VK_KHR_display ===
8988 
8989  template <typename Dispatch>
8990  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
8991  VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
8992  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8993  {
8994  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8995  return static_cast<Result>(
8996  d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
8997  }
8998 
8999 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9000  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
9002  PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
9003  {
9004  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9006  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
9007 # endif
9008 
9009  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
9010  uint32_t propertyCount;
9012  do
9013  {
9014  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
9015  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9016  {
9017  properties.resize( propertyCount );
9018  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9019  d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
9020  }
9021  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9022  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
9023  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9024  if ( propertyCount < properties.size() )
9025  {
9026  properties.resize( propertyCount );
9027  }
9028  return createResultValueType( result, properties );
9029  }
9030 
9031  template <
9032  typename DisplayPropertiesKHRAllocator,
9033  typename Dispatch,
9034  typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type>
9036  PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
9037  {
9038  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9039 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9040  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
9041 # endif
9042 
9043  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
9044  uint32_t propertyCount;
9046  do
9047  {
9048  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
9049  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9050  {
9051  properties.resize( propertyCount );
9052  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9053  d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
9054  }
9055  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9056  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
9057  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9058  if ( propertyCount < properties.size() )
9059  {
9060  properties.resize( propertyCount );
9061  }
9062  return createResultValueType( result, properties );
9063  }
9064 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9065 
9066  template <typename Dispatch>
9067  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
9068  VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
9069  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9070  {
9071  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9072  return static_cast<Result>(
9073  d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
9074  }
9075 
9076 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9077  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
9080  PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
9081  {
9082  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9083 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9084  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
9085 # endif
9086 
9087  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
9088  uint32_t propertyCount;
9090  do
9091  {
9092  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
9093  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9094  {
9095  properties.resize( propertyCount );
9096  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
9097  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
9098  }
9099  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9100  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
9101  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9102  if ( propertyCount < properties.size() )
9103  {
9104  properties.resize( propertyCount );
9105  }
9106  return createResultValueType( result, properties );
9107  }
9108 
9109  template <
9110  typename DisplayPlanePropertiesKHRAllocator,
9111  typename Dispatch,
9112  typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
9113  int>::type>
9116  PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
9117  {
9118  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9119 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9120  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
9121 # endif
9122 
9123  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
9124  uint32_t propertyCount;
9126  do
9127  {
9128  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
9129  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9130  {
9131  properties.resize( propertyCount );
9132  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
9133  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
9134  }
9135  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9136  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
9137  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9138  if ( propertyCount < properties.size() )
9139  {
9140  properties.resize( propertyCount );
9141  }
9142  return createResultValueType( result, properties );
9143  }
9144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9145 
9146  template <typename Dispatch>
9147  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
9148  uint32_t * pDisplayCount,
9149  VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
9150  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9151  {
9152  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9153  return static_cast<Result>(
9154  d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
9155  }
9156 
9157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9158  template <typename DisplayKHRAllocator, typename Dispatch>
9160  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
9161  {
9162  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9163 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9164  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" );
9165 # endif
9166 
9167  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
9168  uint32_t displayCount;
9170  do
9171  {
9172  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
9173  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
9174  {
9175  displays.resize( displayCount );
9176  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9177  d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
9178  }
9179  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9180  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
9181  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
9182  if ( displayCount < displays.size() )
9183  {
9184  displays.resize( displayCount );
9185  }
9186  return createResultValueType( result, displays );
9187  }
9188 
9189  template <typename DisplayKHRAllocator,
9190  typename Dispatch,
9191  typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type>
9193  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
9194  {
9195  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9196 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9197  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" );
9198 # endif
9199 
9200  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
9201  uint32_t displayCount;
9203  do
9204  {
9205  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
9206  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
9207  {
9208  displays.resize( displayCount );
9209  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9210  d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
9211  }
9212  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9213  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
9214  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
9215  if ( displayCount < displays.size() )
9216  {
9217  displays.resize( displayCount );
9218  }
9219  return createResultValueType( result, displays );
9220  }
9221 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9222 
9223  template <typename Dispatch>
9224  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
9225  uint32_t * pPropertyCount,
9226  VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
9227  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9228  {
9229  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9230  return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
9231  m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
9232  }
9233 
9234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9235  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
9238  PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
9239  {
9240  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9241 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9242  VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
9243 # endif
9244 
9245  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
9246  uint32_t propertyCount;
9248  do
9249  {
9250  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9251  d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
9252  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9253  {
9254  properties.resize( propertyCount );
9255  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR(
9256  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
9257  }
9258  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9259  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
9260  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9261  if ( propertyCount < properties.size() )
9262  {
9263  properties.resize( propertyCount );
9264  }
9265  return createResultValueType( result, properties );
9266  }
9267 
9268  template <typename DisplayModePropertiesKHRAllocator,
9269  typename Dispatch,
9270  typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
9271  int>::type>
9274  PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
9275  DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
9276  Dispatch const & d ) const
9277  {
9278  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9279 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9280  VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
9281 # endif
9282 
9283  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
9284  uint32_t propertyCount;
9286  do
9287  {
9288  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
9289  d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
9290  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
9291  {
9292  properties.resize( propertyCount );
9293  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR(
9294  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
9295  }
9296  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
9297  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
9298  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
9299  if ( propertyCount < properties.size() )
9300  {
9301  properties.resize( propertyCount );
9302  }
9303  return createResultValueType( result, properties );
9304  }
9305 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9306 
9307  template <typename Dispatch>
9308  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
9309  const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
9310  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9311  VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
9312  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9313  {
9314  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9315  return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
9316  static_cast<VkDisplayKHR>( display ),
9317  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
9318  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9319  reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
9320  }
9321 
9322 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9323  template <typename Dispatch>
9325  PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
9326  const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
9328  Dispatch const & d ) const
9329  {
9330  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9331 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9332  VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" );
9333 # endif
9334 
9335  VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
9337  d.vkCreateDisplayModeKHR( m_physicalDevice,
9338  static_cast<VkDisplayKHR>( display ),
9339  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
9340  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9341  reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
9342  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
9343 
9344  return createResultValueType( result, mode );
9345  }
9346 
9347 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9348  template <typename Dispatch>
9350  PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
9351  const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
9353  Dispatch const & d ) const
9354  {
9355  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9356 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9357  VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" );
9358 # endif
9359 
9360  VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
9362  d.vkCreateDisplayModeKHR( m_physicalDevice,
9363  static_cast<VkDisplayKHR>( display ),
9364  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
9365  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9366  reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
9367  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
9368 
9369  return createResultValueType(
9371  }
9372 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9374 
9375  template <typename Dispatch>
9377  PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
9378  uint32_t planeIndex,
9379  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
9380  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9381  {
9382  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9383  return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
9384  m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
9385  }
9386 
9387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9388  template <typename Dispatch>
9390  PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
9391  {
9392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9393 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9394  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" );
9395 # endif
9396 
9397  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
9398  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
9399  m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
9400  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
9401 
9402  return createResultValueType( result, capabilities );
9403  }
9404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9405 
9406  template <typename Dispatch>
9407  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
9408  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9409  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
9410  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9411  {
9412  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9413  return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
9414  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
9415  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9416  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
9417  }
9418 
9419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9420  template <typename Dispatch>
9422  Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
9424  Dispatch const & d ) const
9425  {
9426  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9427 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9428  VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
9429 # endif
9430 
9431  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9432  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
9433  m_instance,
9434  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
9435  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9436  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9437  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
9438 
9439  return createResultValueType( result, surface );
9440  }
9441 
9442 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9443  template <typename Dispatch>
9445  Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
9447  Dispatch const & d ) const
9448  {
9449  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9450 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9451  VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" );
9452 # endif
9453 
9454  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9455  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR(
9456  m_instance,
9457  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
9458  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9459  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9460  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
9461 
9462  return createResultValueType(
9464  }
9465 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9466 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9467 
9468  //=== VK_KHR_display_swapchain ===
9469 
9470  template <typename Dispatch>
9471  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
9472  const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
9473  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9474  VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
9475  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9476  {
9477  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9478  return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
9479  swapchainCount,
9480  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
9481  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9482  reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
9483  }
9484 
9485 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9486  template <typename SwapchainKHRAllocator, typename Dispatch>
9488  Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
9490  Dispatch const & d ) const
9491  {
9492  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9493 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9494  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9495 # endif
9496 
9497  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
9498  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9499  m_device,
9500  createInfos.size(),
9501  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
9502  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9503  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
9504  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
9505 
9506  return createResultValueType( result, swapchains );
9507  }
9508 
9509  template <typename SwapchainKHRAllocator,
9510  typename Dispatch,
9511  typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type>
9513  Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
9515  SwapchainKHRAllocator & swapchainKHRAllocator,
9516  Dispatch const & d ) const
9517  {
9518  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9519 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9520  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9521 # endif
9522 
9523  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
9524  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9525  m_device,
9526  createInfos.size(),
9527  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
9528  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9529  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
9530  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
9531 
9532  return createResultValueType( result, swapchains );
9533  }
9534 
9535  template <typename Dispatch>
9537  Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
9539  Dispatch const & d ) const
9540  {
9541  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9542 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9543  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9544 # endif
9545 
9546  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
9547  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9548  m_device,
9549  1,
9550  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
9551  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9552  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
9553  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
9554 
9555  return createResultValueType( result, swapchain );
9556  }
9557 
9558 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9559  template <typename Dispatch, typename SwapchainKHRAllocator>
9562  Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
9564  Dispatch const & d ) const
9565  {
9566  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9567 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9568  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9569 # endif
9570 
9571  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
9572  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9573  m_device,
9574  createInfos.size(),
9575  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
9576  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9577  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
9578  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
9579  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
9580  uniqueSwapchains.reserve( createInfos.size() );
9581  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
9582  for ( auto const & swapchain : swapchains )
9583  {
9584  uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
9585  }
9586  return createResultValueType( result, std::move( uniqueSwapchains ) );
9587  }
9588 
9589  template <typename Dispatch,
9590  typename SwapchainKHRAllocator,
9591  typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value,
9592  int>::type>
9595  Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
9597  SwapchainKHRAllocator & swapchainKHRAllocator,
9598  Dispatch const & d ) const
9599  {
9600  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9601 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9602  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9603 # endif
9604 
9605  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
9606  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9607  m_device,
9608  createInfos.size(),
9609  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
9610  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9611  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
9612  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
9613  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
9614  uniqueSwapchains.reserve( createInfos.size() );
9615  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
9616  for ( auto const & swapchain : swapchains )
9617  {
9618  uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
9619  }
9620  return createResultValueType( result, std::move( uniqueSwapchains ) );
9621  }
9622 
9623  template <typename Dispatch>
9625  Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
9627  Dispatch const & d ) const
9628  {
9629  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9631  VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" );
9632 # endif
9633 
9634  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
9635  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR(
9636  m_device,
9637  1,
9638  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
9639  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9640  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
9641  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
9642 
9643  return createResultValueType(
9645  }
9646 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9647 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9648 
9649 #if defined( VK_USE_PLATFORM_XLIB_KHR )
9650  //=== VK_KHR_xlib_surface ===
9651 
9652  template <typename Dispatch>
9653  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
9654  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9655  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
9656  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9657  {
9658  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9659  return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
9660  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
9661  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9662  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
9663  }
9664 
9665 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9666  template <typename Dispatch>
9667  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
9668  Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
9669  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9670  Dispatch const & d ) const
9671  {
9672  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9673 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9674  VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" );
9675 # endif
9676 
9677  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9679  d.vkCreateXlibSurfaceKHR( m_instance,
9680  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
9681  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9682  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9683  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
9684 
9685  return createResultValueType( result, surface );
9686  }
9687 
9688 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9689  template <typename Dispatch>
9690  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
9691  Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
9692  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9693  Dispatch const & d ) const
9694  {
9695  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9696 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9697  VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" );
9698 # endif
9699 
9700  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9702  d.vkCreateXlibSurfaceKHR( m_instance,
9703  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
9704  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9705  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9706  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
9707 
9708  return createResultValueType(
9709  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
9710  }
9711 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9712 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9713 
9714  template <typename Dispatch>
9716  PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9717  {
9718  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9719  return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
9720  }
9721 
9722 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9723  template <typename Dispatch>
9725  PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9726  {
9727  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9728 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9729  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR &&
9730  "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" );
9731 # endif
9732 
9733  VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
9734 
9735  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
9736  }
9737 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9738 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
9739 
9740 #if defined( VK_USE_PLATFORM_XCB_KHR )
9741  //=== VK_KHR_xcb_surface ===
9742 
9743  template <typename Dispatch>
9744  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
9745  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9746  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
9747  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9748  {
9749  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9750  return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
9751  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
9752  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9753  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
9754  }
9755 
9756 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9757  template <typename Dispatch>
9758  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
9759  Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
9760  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9761  Dispatch const & d ) const
9762  {
9763  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9764 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9765  VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" );
9766 # endif
9767 
9768  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9770  d.vkCreateXcbSurfaceKHR( m_instance,
9771  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
9772  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9773  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9774  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
9775 
9776  return createResultValueType( result, surface );
9777  }
9778 
9779 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9780  template <typename Dispatch>
9781  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
9782  Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
9783  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9784  Dispatch const & d ) const
9785  {
9786  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9787 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9788  VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" );
9789 # endif
9790 
9791  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9793  d.vkCreateXcbSurfaceKHR( m_instance,
9794  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
9795  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9796  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9797  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
9798 
9799  return createResultValueType(
9800  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
9801  }
9802 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9803 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9804 
9805  template <typename Dispatch>
9806  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
9807  xcb_connection_t * connection,
9808  xcb_visualid_t visual_id,
9809  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9810  {
9811  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9812  return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
9813  }
9814 
9815 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9816  template <typename Dispatch>
9817  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
9818  xcb_connection_t & connection,
9819  xcb_visualid_t visual_id,
9820  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9821  {
9822  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9823 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9824  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR &&
9825  "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" );
9826 # endif
9827 
9828  VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
9829 
9830  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
9831  }
9832 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9833 #endif /*VK_USE_PLATFORM_XCB_KHR*/
9834 
9835 #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
9836  //=== VK_KHR_wayland_surface ===
9837 
9838  template <typename Dispatch>
9839  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
9840  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9841  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
9842  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9843  {
9844  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9845  return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
9846  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
9847  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9848  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
9849  }
9850 
9851 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9852  template <typename Dispatch>
9853  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
9854  Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
9855  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9856  Dispatch const & d ) const
9857  {
9858  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9859 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9860  VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
9861 # endif
9862 
9863  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9864  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
9865  m_instance,
9866  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
9867  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9868  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9869  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
9870 
9871  return createResultValueType( result, surface );
9872  }
9873 
9874 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9875  template <typename Dispatch>
9876  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
9877  Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
9878  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9879  Dispatch const & d ) const
9880  {
9881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9882 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9883  VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" );
9884 # endif
9885 
9886  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9887  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR(
9888  m_instance,
9889  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
9890  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9891  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9892  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
9893 
9894  return createResultValueType(
9895  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
9896  }
9897 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9898 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9899 
9900  template <typename Dispatch>
9901  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
9902  struct wl_display * display,
9903  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9904  {
9905  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9906  return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
9907  }
9908 
9909 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9910  template <typename Dispatch>
9912  PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9913  {
9914  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9915 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9916  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
9917  "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" );
9918 # endif
9919 
9920  VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
9921 
9922  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
9923  }
9924 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9925 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
9926 
9927 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
9928  //=== VK_KHR_android_surface ===
9929 
9930  template <typename Dispatch>
9931  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
9932  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9933  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
9934  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9935  {
9936  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9937  return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
9938  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
9939  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9940  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
9941  }
9942 
9943 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9944  template <typename Dispatch>
9945  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
9946  Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
9947  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9948  Dispatch const & d ) const
9949  {
9950  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9951 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9952  VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
9953 # endif
9954 
9955  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9956  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
9957  m_instance,
9958  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
9959  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9960  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9961  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
9962 
9963  return createResultValueType( result, surface );
9964  }
9965 
9966 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9967  template <typename Dispatch>
9968  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
9969  Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
9970  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9971  Dispatch const & d ) const
9972  {
9973  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9974 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
9975  VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" );
9976 # endif
9977 
9978  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
9979  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR(
9980  m_instance,
9981  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
9982  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9983  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
9984  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
9985 
9986  return createResultValueType(
9987  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
9988  }
9989 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9990 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9991 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
9992 
9993 #if defined( VK_USE_PLATFORM_WIN32_KHR )
9994  //=== VK_KHR_win32_surface ===
9995 
9996  template <typename Dispatch>
9997  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
9998  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9999  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
10000  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10001  {
10002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10003  return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
10004  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
10005  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10006  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
10007  }
10008 
10009 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10010  template <typename Dispatch>
10011  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
10012  Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
10013  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10014  Dispatch const & d ) const
10015  {
10016  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10017 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10018  VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
10019 # endif
10020 
10021  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
10023  d.vkCreateWin32SurfaceKHR( m_instance,
10024  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
10025  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10026  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
10027  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
10028 
10029  return createResultValueType( result, surface );
10030  }
10031 
10032 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10033  template <typename Dispatch>
10034  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
10035  Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
10036  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10037  Dispatch const & d ) const
10038  {
10039  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10040 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10041  VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" );
10042 # endif
10043 
10044  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
10046  d.vkCreateWin32SurfaceKHR( m_instance,
10047  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
10048  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10049  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
10050  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
10051 
10052  return createResultValueType(
10053  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
10054  }
10055 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10056 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10057 
10058  template <typename Dispatch>
10059  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10060  {
10061  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10062  return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
10063  }
10064 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
10065 
10066  //=== VK_EXT_debug_report ===
10067 
10068  template <typename Dispatch>
10070  Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
10071  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10072  VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
10073  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10074  {
10075  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10076  return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
10077  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
10078  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10079  reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
10080  }
10081 
10082 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10083  template <typename Dispatch>
10085  Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
10087  Dispatch const & d ) const
10088  {
10089  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10090 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10091  VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
10092 # endif
10093 
10094  VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
10095  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
10096  m_instance,
10097  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
10098  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10099  reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
10100  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
10101 
10102  return createResultValueType( result, callback );
10103  }
10104 
10105 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10106  template <typename Dispatch>
10108  Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
10110  Dispatch const & d ) const
10111  {
10112  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10113 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10114  VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
10115 # endif
10116 
10117  VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
10118  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT(
10119  m_instance,
10120  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
10121  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10122  reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
10123  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
10124 
10125  return createResultValueType(
10127  }
10128 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10129 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10130 
10131  template <typename Dispatch>
10132  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
10133  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10134  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10135  {
10136  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10137  d.vkDestroyDebugReportCallbackEXT(
10138  m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10139  }
10140 
10141 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10142  template <typename Dispatch>
10143  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
10145  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10146  {
10147  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10148 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10149  VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
10150 # endif
10151 
10152  d.vkDestroyDebugReportCallbackEXT(
10153  m_instance,
10154  static_cast<VkDebugReportCallbackEXT>( callback ),
10155  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10156  }
10157 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10158 
10159  template <typename Dispatch>
10160  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
10161  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10162  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10163  {
10164  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10165  d.vkDestroyDebugReportCallbackEXT(
10166  m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10167  }
10168 
10169 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10170  template <typename Dispatch>
10171  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
10173  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10174  {
10175  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10176 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10177  VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" );
10178 # endif
10179 
10180  d.vkDestroyDebugReportCallbackEXT(
10181  m_instance,
10182  static_cast<VkDebugReportCallbackEXT>( callback ),
10183  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10184  }
10185 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10186 
10187  template <typename Dispatch>
10188  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
10190  uint64_t object,
10191  size_t location,
10192  int32_t messageCode,
10193  const char * pLayerPrefix,
10194  const char * pMessage,
10195  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10196  {
10197  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10198  d.vkDebugReportMessageEXT( m_instance,
10199  static_cast<VkDebugReportFlagsEXT>( flags ),
10200  static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
10201  object,
10202  location,
10203  messageCode,
10204  pLayerPrefix,
10205  pMessage );
10206  }
10207 
10208 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10209  template <typename Dispatch>
10210  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
10212  uint64_t object,
10213  size_t location,
10214  int32_t messageCode,
10215  const std::string & layerPrefix,
10216  const std::string & message,
10217  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10218  {
10219  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10220 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10221  VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" );
10222 # endif
10223 
10224  d.vkDebugReportMessageEXT( m_instance,
10225  static_cast<VkDebugReportFlagsEXT>( flags ),
10226  static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
10227  object,
10228  location,
10229  messageCode,
10230  layerPrefix.c_str(),
10231  message.c_str() );
10232  }
10233 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10234 
10235  //=== VK_EXT_debug_marker ===
10236 
10237  template <typename Dispatch>
10238  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
10239  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10240  {
10241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10242  return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
10243  }
10244 
10245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10246  template <typename Dispatch>
10248  Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
10249  {
10250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10251 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10252  VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" );
10253 # endif
10254 
10256  d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
10257  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
10258 
10259  return createResultValueType( result );
10260  }
10261 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10262 
10263  template <typename Dispatch>
10264  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
10265  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10266  {
10267  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10268  return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
10269  }
10270 
10271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10272  template <typename Dispatch>
10274  Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
10275  {
10276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10277 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10278  VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" );
10279 # endif
10280 
10282  d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
10283  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
10284 
10285  return createResultValueType( result );
10286  }
10287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10288 
10289  template <typename Dispatch>
10290  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
10291  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10292  {
10293  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10294  d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
10295  }
10296 
10297 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10298  template <typename Dispatch>
10299  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
10300  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10301  {
10302  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10303 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10304  VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" );
10305 # endif
10306 
10307  d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
10308  }
10309 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10310 
10311  template <typename Dispatch>
10312  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10313  {
10314  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10315  d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
10316  }
10317 
10318  template <typename Dispatch>
10319  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
10320  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10321  {
10322  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10323  d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
10324  }
10325 
10326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10327  template <typename Dispatch>
10328  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
10329  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10330  {
10331  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10332 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10333  VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" );
10334 # endif
10335 
10336  d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
10337  }
10338 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10339 
10340  //=== VK_KHR_video_queue ===
10341 
10342  template <typename Dispatch>
10343  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
10344  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
10345  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10346  {
10347  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10348  return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
10349  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
10350  }
10351 
10352 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10353  template <typename Dispatch>
10355  PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
10356  {
10357  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10358 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10359  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
10360 # endif
10361 
10362  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
10363  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
10364  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
10365  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
10366 
10367  return createResultValueType( result, capabilities );
10368  }
10369 
10370  template <typename X, typename Y, typename... Z, typename Dispatch>
10371  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
10372  PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
10373  {
10374  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10375 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10376  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
10377 # endif
10378 
10379  StructureChain<X, Y, Z...> structureChain;
10380  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
10381  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
10382  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
10383  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
10384 
10385  return createResultValueType( result, structureChain );
10386  }
10387 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10388 
10389  template <typename Dispatch>
10391  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
10392  uint32_t * pVideoFormatPropertyCount,
10393  VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
10394  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10395  {
10396  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10397  return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
10398  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
10399  pVideoFormatPropertyCount,
10400  reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
10401  }
10402 
10403 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10404  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
10407  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
10408  {
10409  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10410 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10411  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
10412  "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
10413 # endif
10414 
10415  std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
10416  uint32_t videoFormatPropertyCount;
10418  do
10419  {
10420  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
10421  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) );
10422  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
10423  {
10424  videoFormatProperties.resize( videoFormatPropertyCount );
10425  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10426  d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
10427  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
10428  &videoFormatPropertyCount,
10429  reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
10430  }
10431  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
10432  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
10433  VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
10434  if ( videoFormatPropertyCount < videoFormatProperties.size() )
10435  {
10436  videoFormatProperties.resize( videoFormatPropertyCount );
10437  }
10438  return createResultValueType( result, videoFormatProperties );
10439  }
10440 
10441  template <typename VideoFormatPropertiesKHRAllocator,
10442  typename Dispatch,
10443  typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
10444  int>::type>
10447  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
10448  VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
10449  Dispatch const & d ) const
10450  {
10451  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10452 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10453  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
10454  "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
10455 # endif
10456 
10457  std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
10458  uint32_t videoFormatPropertyCount;
10460  do
10461  {
10462  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
10463  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) );
10464  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
10465  {
10466  videoFormatProperties.resize( videoFormatPropertyCount );
10467  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10468  d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
10469  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
10470  &videoFormatPropertyCount,
10471  reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
10472  }
10473  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
10474  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
10475  VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
10476  if ( videoFormatPropertyCount < videoFormatProperties.size() )
10477  {
10478  videoFormatProperties.resize( videoFormatPropertyCount );
10479  }
10480  return createResultValueType( result, videoFormatProperties );
10481  }
10482 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10483 
10484  template <typename Dispatch>
10485  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
10486  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10487  VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
10488  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10489  {
10490  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10491  return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
10492  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
10493  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10494  reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
10495  }
10496 
10497 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10498  template <typename Dispatch>
10500  Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
10502  Dispatch const & d ) const
10503  {
10504  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10505 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10506  VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" );
10507 # endif
10508 
10509  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
10511  d.vkCreateVideoSessionKHR( m_device,
10512  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
10513  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10514  reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
10515  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
10516 
10517  return createResultValueType( result, videoSession );
10518  }
10519 
10520 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10521  template <typename Dispatch>
10523  Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
10525  Dispatch const & d ) const
10526  {
10527  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10528 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10529  VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" );
10530 # endif
10531 
10532  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
10534  d.vkCreateVideoSessionKHR( m_device,
10535  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
10536  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10537  reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
10538  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
10539 
10540  return createResultValueType(
10542  }
10543 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10544 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10545 
10546  template <typename Dispatch>
10547  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10548  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10549  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10550  {
10551  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10552  d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10553  }
10554 
10555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10556  template <typename Dispatch>
10557  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10559  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10560  {
10561  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10562 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10563  VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" );
10564 # endif
10565 
10566  d.vkDestroyVideoSessionKHR(
10567  m_device,
10568  static_cast<VkVideoSessionKHR>( videoSession ),
10569  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10570  }
10571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10572 
10573  template <typename Dispatch>
10574  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10575  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10576  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10577  {
10578  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10579  d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10580  }
10581 
10582 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10583  template <typename Dispatch>
10584  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10587  {
10588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10589 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10590  VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" );
10591 # endif
10592 
10593  d.vkDestroyVideoSessionKHR(
10594  m_device,
10595  static_cast<VkVideoSessionKHR>( videoSession ),
10596  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10597  }
10598 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10599 
10600  template <typename Dispatch>
10602  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10603  uint32_t * pMemoryRequirementsCount,
10604  VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
10605  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10606  {
10607  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10608  return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
10609  static_cast<VkVideoSessionKHR>( videoSession ),
10610  pMemoryRequirementsCount,
10611  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
10612  }
10613 
10614 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10615  template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
10618  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
10619  {
10620  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10621 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10622  VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" );
10623 # endif
10624 
10625  std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
10626  uint32_t memoryRequirementsCount;
10628  do
10629  {
10630  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10631  d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) );
10632  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount )
10633  {
10634  memoryRequirements.resize( memoryRequirementsCount );
10635  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10636  d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
10637  static_cast<VkVideoSessionKHR>( videoSession ),
10638  &memoryRequirementsCount,
10639  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) );
10640  }
10641  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
10642 
10643  VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
10644  if ( memoryRequirementsCount < memoryRequirements.size() )
10645  {
10646  memoryRequirements.resize( memoryRequirementsCount );
10647  }
10648  return memoryRequirements;
10649  }
10650 
10651  template <typename VideoSessionMemoryRequirementsKHRAllocator,
10652  typename Dispatch,
10653  typename std::enable_if<
10654  std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value,
10655  int>::type>
10658  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10659  VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
10660  Dispatch const & d ) const
10661  {
10662  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10663 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10664  VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" );
10665 # endif
10666 
10667  std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
10668  videoSessionMemoryRequirementsKHRAllocator );
10669  uint32_t memoryRequirementsCount;
10671  do
10672  {
10673  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10674  d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) );
10675  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount )
10676  {
10677  memoryRequirements.resize( memoryRequirementsCount );
10678  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
10679  d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
10680  static_cast<VkVideoSessionKHR>( videoSession ),
10681  &memoryRequirementsCount,
10682  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) );
10683  }
10684  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
10685 
10686  VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
10687  if ( memoryRequirementsCount < memoryRequirements.size() )
10688  {
10689  memoryRequirements.resize( memoryRequirementsCount );
10690  }
10691  return memoryRequirements;
10692  }
10693 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10694 
10695  template <typename Dispatch>
10697  Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10698  uint32_t bindSessionMemoryInfoCount,
10699  const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
10700  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10701  {
10702  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10703  return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
10704  static_cast<VkVideoSessionKHR>( videoSession ),
10705  bindSessionMemoryInfoCount,
10706  reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
10707  }
10708 
10709 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10710  template <typename Dispatch>
10712  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
10713  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,
10714  Dispatch const & d ) const
10715  {
10716  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10717 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10718  VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" );
10719 # endif
10720 
10722  d.vkBindVideoSessionMemoryKHR( m_device,
10723  static_cast<VkVideoSessionKHR>( videoSession ),
10724  bindSessionMemoryInfos.size(),
10725  reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) );
10726  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
10727 
10728  return createResultValueType( result );
10729  }
10730 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10731 
10732  template <typename Dispatch>
10734  Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
10735  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10736  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
10737  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10738  {
10739  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10740  return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
10741  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
10742  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10743  reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
10744  }
10745 
10746 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10747  template <typename Dispatch>
10749  Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
10751  Dispatch const & d ) const
10752  {
10753  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10755  VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
10756 # endif
10757 
10758  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
10759  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR(
10760  m_device,
10761  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
10762  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10763  reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
10764  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
10765 
10766  return createResultValueType( result, videoSessionParameters );
10767  }
10768 
10769 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10770  template <typename Dispatch>
10772  Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
10774  Dispatch const & d ) const
10775  {
10776  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10777 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10778  VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
10779 # endif
10780 
10781  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
10782  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR(
10783  m_device,
10784  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
10785  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10786  reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
10787  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
10788 
10789  return createResultValueType( result,
10791  videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
10792  }
10793 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10794 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10795 
10796  template <typename Dispatch>
10798  Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10799  const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
10800  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10801  {
10802  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10803  return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
10804  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
10805  reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
10806  }
10807 
10808 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10809  template <typename Dispatch>
10811  Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10812  const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
10813  Dispatch const & d ) const
10814  {
10815  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10816 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10817  VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
10818 # endif
10819 
10821  d.vkUpdateVideoSessionParametersKHR( m_device,
10822  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
10823  reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
10824  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
10825 
10826  return createResultValueType( result );
10827  }
10828 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10829 
10830  template <typename Dispatch>
10831  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10832  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10833  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10834  {
10835  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10836  d.vkDestroyVideoSessionParametersKHR(
10837  m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10838  }
10839 
10840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10841  template <typename Dispatch>
10842  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10844  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10845  {
10846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10847 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10848  VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
10849 # endif
10850 
10851  d.vkDestroyVideoSessionParametersKHR(
10852  m_device,
10853  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
10854  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10855  }
10856 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10857 
10858  template <typename Dispatch>
10859  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10860  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10861  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10862  {
10863  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10864  d.vkDestroyVideoSessionParametersKHR(
10865  m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10866  }
10867 
10868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10869  template <typename Dispatch>
10870  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
10872  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10873  {
10874  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10875 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10876  VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
10877 # endif
10878 
10879  d.vkDestroyVideoSessionParametersKHR(
10880  m_device,
10881  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
10882  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10883  }
10884 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10885 
10886  template <typename Dispatch>
10887  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
10888  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10889  {
10890  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10891  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
10892  }
10893 
10894 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10895  template <typename Dispatch>
10896  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
10897  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10898  {
10899  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10900 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10901  VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" );
10902 # endif
10903 
10904  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
10905  }
10906 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10907 
10908  template <typename Dispatch>
10909  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
10910  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10911  {
10912  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10913  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
10914  }
10915 
10916 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10917  template <typename Dispatch>
10918  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
10919  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10920  {
10921  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10922 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10923  VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" );
10924 # endif
10925 
10926  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
10927  }
10928 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10929 
10930  template <typename Dispatch>
10931  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
10932  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10933  {
10934  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10935  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
10936  }
10937 
10938 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10939  template <typename Dispatch>
10940  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
10941  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10942  {
10943  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10944 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10945  VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" );
10946 # endif
10947 
10948  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
10949  }
10950 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10951 
10952  //=== VK_KHR_video_decode_queue ===
10953 
10954  template <typename Dispatch>
10955  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
10956  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10957  {
10958  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10959  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
10960  }
10961 
10962 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10963  template <typename Dispatch>
10964  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
10965  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10966  {
10967  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10968 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
10969  VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" );
10970 # endif
10971 
10972  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
10973  }
10974 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10975 
10976  //=== VK_EXT_transform_feedback ===
10977 
10978  template <typename Dispatch>
10979  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
10980  uint32_t bindingCount,
10981  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
10982  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
10983  const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
10984  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10985  {
10986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10987  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
10988  firstBinding,
10989  bindingCount,
10990  reinterpret_cast<const VkBuffer *>( pBuffers ),
10991  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
10992  reinterpret_cast<const VkDeviceSize *>( pSizes ) );
10993  }
10994 
10995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10996  template <typename Dispatch>
10997  VULKAN_HPP_INLINE void
10998  CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
10999  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
11000  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
11001  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
11002  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
11003  {
11004  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11005 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11006  VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" );
11007 # endif
11008 # ifdef VULKAN_HPP_NO_EXCEPTIONS
11009  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
11010  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
11011 # else
11012  if ( buffers.size() != offsets.size() )
11013  {
11014  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
11015  }
11016  if ( !sizes.empty() && buffers.size() != sizes.size() )
11017  {
11018  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
11019  }
11020 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
11021 
11022  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
11023  firstBinding,
11024  buffers.size(),
11025  reinterpret_cast<const VkBuffer *>( buffers.data() ),
11026  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
11027  reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
11028  }
11029 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11030 
11031  template <typename Dispatch>
11032  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
11033  uint32_t counterBufferCount,
11034  const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
11035  const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
11036  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11037  {
11038  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11039  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
11040  firstCounterBuffer,
11041  counterBufferCount,
11042  reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
11043  reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
11044  }
11045 
11046 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11047  template <typename Dispatch>
11048  VULKAN_HPP_INLINE void
11049  CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
11050  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
11051  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
11052  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
11053  {
11054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11055 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11056  VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
11057 # endif
11058 # ifdef VULKAN_HPP_NO_EXCEPTIONS
11059  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
11060 # else
11061  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
11062  {
11063  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
11064  }
11065 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
11066 
11067  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
11068  firstCounterBuffer,
11069  counterBuffers.size(),
11070  reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
11071  reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
11072  }
11073 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11074 
11075  template <typename Dispatch>
11076  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
11077  uint32_t counterBufferCount,
11078  const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
11079  const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
11080  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11081  {
11082  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11083  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
11084  firstCounterBuffer,
11085  counterBufferCount,
11086  reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
11087  reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
11088  }
11089 
11090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11091  template <typename Dispatch>
11092  VULKAN_HPP_INLINE void
11093  CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
11094  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
11095  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
11096  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
11097  {
11098  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11099 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11100  VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
11101 # endif
11102 # ifdef VULKAN_HPP_NO_EXCEPTIONS
11103  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
11104 # else
11105  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
11106  {
11107  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
11108  }
11109 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
11110 
11111  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
11112  firstCounterBuffer,
11113  counterBuffers.size(),
11114  reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
11115  reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
11116  }
11117 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11118 
11119  template <typename Dispatch>
11120  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
11121  uint32_t query,
11123  uint32_t index,
11124  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11125  {
11126  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11127  d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
11128  }
11129 
11130  template <typename Dispatch>
11131  VULKAN_HPP_INLINE void
11132  CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11133  {
11134  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11135  d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
11136  }
11137 
11138  template <typename Dispatch>
11139  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
11140  uint32_t firstInstance,
11141  VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
11142  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
11143  uint32_t counterOffset,
11144  uint32_t vertexStride,
11145  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11146  {
11147  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11148  d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
11149  instanceCount,
11150  firstInstance,
11151  static_cast<VkBuffer>( counterBuffer ),
11152  static_cast<VkDeviceSize>( counterBufferOffset ),
11153  counterOffset,
11154  vertexStride );
11155  }
11156 
11157  //=== VK_NVX_binary_import ===
11158 
11159  template <typename Dispatch>
11160  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
11161  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11162  VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
11163  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11164  {
11165  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11166  return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
11167  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
11168  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11169  reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
11170  }
11171 
11172 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11173  template <typename Dispatch>
11175  Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
11177  Dispatch const & d ) const
11178  {
11179  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11180 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11181  VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" );
11182 # endif
11183 
11184  VULKAN_HPP_NAMESPACE::CuModuleNVX module;
11186  d.vkCreateCuModuleNVX( m_device,
11187  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
11188  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11189  reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
11190  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
11191 
11192  return createResultValueType( result, module );
11193  }
11194 
11195 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11196  template <typename Dispatch>
11198  Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
11200  Dispatch const & d ) const
11201  {
11202  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11203 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11204  VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" );
11205 # endif
11206 
11207  VULKAN_HPP_NAMESPACE::CuModuleNVX module;
11209  d.vkCreateCuModuleNVX( m_device,
11210  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
11211  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11212  reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
11213  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
11214 
11215  return createResultValueType( result,
11217  }
11218 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11220 
11221  template <typename Dispatch>
11222  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
11223  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11224  VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
11225  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11226  {
11227  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11228  return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
11229  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
11230  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11231  reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
11232  }
11233 
11234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11235  template <typename Dispatch>
11237  Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
11239  Dispatch const & d ) const
11240  {
11241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11242 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11243  VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" );
11244 # endif
11245 
11246  VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
11248  d.vkCreateCuFunctionNVX( m_device,
11249  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
11250  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11251  reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
11252  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
11253 
11254  return createResultValueType( result, function );
11255  }
11256 
11257 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11258  template <typename Dispatch>
11260  Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
11262  Dispatch const & d ) const
11263  {
11264  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11265 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11266  VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" );
11267 # endif
11268 
11269  VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
11271  d.vkCreateCuFunctionNVX( m_device,
11272  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
11273  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11274  reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
11275  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
11276 
11277  return createResultValueType(
11279  }
11280 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11281 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11282 
11283  template <typename Dispatch>
11284  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
11285  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11286  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11287  {
11288  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11289  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11290  }
11291 
11292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11293  template <typename Dispatch>
11294  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
11296  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11297  {
11298  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11299 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11300  VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" );
11301 # endif
11302 
11303  d.vkDestroyCuModuleNVX( m_device,
11304  static_cast<VkCuModuleNVX>( module ),
11305  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
11306  }
11307 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11308 
11309  template <typename Dispatch>
11310  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
11311  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11312  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11313  {
11314  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11315  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11316  }
11317 
11318 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11319  template <typename Dispatch>
11320  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
11322  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11323  {
11324  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11325 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11326  VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" );
11327 # endif
11328 
11329  d.vkDestroyCuModuleNVX( m_device,
11330  static_cast<VkCuModuleNVX>( module ),
11331  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
11332  }
11333 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11334 
11335  template <typename Dispatch>
11336  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
11337  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11338  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11339  {
11340  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11341  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11342  }
11343 
11344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11345  template <typename Dispatch>
11346  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
11348  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11349  {
11350  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11351 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11352  VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" );
11353 # endif
11354 
11355  d.vkDestroyCuFunctionNVX( m_device,
11356  static_cast<VkCuFunctionNVX>( function ),
11357  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
11358  }
11359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11360 
11361  template <typename Dispatch>
11362  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
11363  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11364  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11365  {
11366  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11367  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11368  }
11369 
11370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11371  template <typename Dispatch>
11372  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
11374  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11375  {
11376  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11377 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11378  VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" );
11379 # endif
11380 
11381  d.vkDestroyCuFunctionNVX( m_device,
11382  static_cast<VkCuFunctionNVX>( function ),
11383  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
11384  }
11385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11386 
11387  template <typename Dispatch>
11388  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
11389  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11390  {
11391  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11392  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
11393  }
11394 
11395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11396  template <typename Dispatch>
11397  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
11398  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11399  {
11400  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11401 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11402  VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" );
11403 # endif
11404 
11405  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
11406  }
11407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11408 
11409  //=== VK_NVX_image_view_handle ===
11410 
11411  template <typename Dispatch>
11412  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
11413  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11414  {
11415  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11416  return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
11417  }
11418 
11419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11420  template <typename Dispatch>
11421  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
11422  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11423  {
11424  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11425 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11426  VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" );
11427 # endif
11428 
11429  uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
11430 
11431  return result;
11432  }
11433 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11434 
11435  template <typename Dispatch>
11436  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
11437  VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
11438  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11439  {
11440  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11441  return static_cast<Result>(
11442  d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
11443  }
11444 
11445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11446  template <typename Dispatch>
11448  Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
11449  {
11450  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11451 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11452  VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" );
11453 # endif
11454 
11455  VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
11457  d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
11458  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
11459 
11460  return createResultValueType( result, properties );
11461  }
11462 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11463 
11464  //=== VK_AMD_draw_indirect_count ===
11465 
11466  template <typename Dispatch>
11467  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
11469  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
11470  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
11471  uint32_t maxDrawCount,
11472  uint32_t stride,
11473  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11474  {
11475  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11476  d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
11477  static_cast<VkBuffer>( buffer ),
11478  static_cast<VkDeviceSize>( offset ),
11479  static_cast<VkBuffer>( countBuffer ),
11480  static_cast<VkDeviceSize>( countBufferOffset ),
11481  maxDrawCount,
11482  stride );
11483  }
11484 
11485  template <typename Dispatch>
11486  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
11488  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
11489  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
11490  uint32_t maxDrawCount,
11491  uint32_t stride,
11492  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11493  {
11494  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11495  d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
11496  static_cast<VkBuffer>( buffer ),
11497  static_cast<VkDeviceSize>( offset ),
11498  static_cast<VkBuffer>( countBuffer ),
11499  static_cast<VkDeviceSize>( countBufferOffset ),
11500  maxDrawCount,
11501  stride );
11502  }
11503 
11504  //=== VK_AMD_shader_info ===
11505 
11506  template <typename Dispatch>
11507  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
11510  size_t * pInfoSize,
11511  void * pInfo,
11512  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11513  {
11514  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11515  return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
11516  static_cast<VkPipeline>( pipeline ),
11517  static_cast<VkShaderStageFlagBits>( shaderStage ),
11518  static_cast<VkShaderInfoTypeAMD>( infoType ),
11519  pInfoSize,
11520  pInfo ) );
11521  }
11522 
11523 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11524  template <typename Uint8_tAllocator, typename Dispatch>
11526  Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
11529  Dispatch const & d ) const
11530  {
11531  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11532 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11533  VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" );
11534 # endif
11535 
11536  std::vector<uint8_t, Uint8_tAllocator> info;
11537  size_t infoSize;
11539  do
11540  {
11541  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device,
11542  static_cast<VkPipeline>( pipeline ),
11543  static_cast<VkShaderStageFlagBits>( shaderStage ),
11544  static_cast<VkShaderInfoTypeAMD>( infoType ),
11545  &infoSize,
11546  nullptr ) );
11547  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize )
11548  {
11549  info.resize( infoSize );
11550  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device,
11551  static_cast<VkPipeline>( pipeline ),
11552  static_cast<VkShaderStageFlagBits>( shaderStage ),
11553  static_cast<VkShaderInfoTypeAMD>( infoType ),
11554  &infoSize,
11555  reinterpret_cast<void *>( info.data() ) ) );
11556  }
11557  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
11558  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
11559  VULKAN_HPP_ASSERT( infoSize <= info.size() );
11560  if ( infoSize < info.size() )
11561  {
11562  info.resize( infoSize );
11563  }
11564  return createResultValueType( result, info );
11565  }
11566 
11567  template <typename Uint8_tAllocator,
11568  typename Dispatch,
11569  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
11571  Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
11574  Uint8_tAllocator & uint8_tAllocator,
11575  Dispatch const & d ) const
11576  {
11577  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11578 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11579  VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" );
11580 # endif
11581 
11582  std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
11583  size_t infoSize;
11585  do
11586  {
11587  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device,
11588  static_cast<VkPipeline>( pipeline ),
11589  static_cast<VkShaderStageFlagBits>( shaderStage ),
11590  static_cast<VkShaderInfoTypeAMD>( infoType ),
11591  &infoSize,
11592  nullptr ) );
11593  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize )
11594  {
11595  info.resize( infoSize );
11596  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device,
11597  static_cast<VkPipeline>( pipeline ),
11598  static_cast<VkShaderStageFlagBits>( shaderStage ),
11599  static_cast<VkShaderInfoTypeAMD>( infoType ),
11600  &infoSize,
11601  reinterpret_cast<void *>( info.data() ) ) );
11602  }
11603  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
11604  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
11605  VULKAN_HPP_ASSERT( infoSize <= info.size() );
11606  if ( infoSize < info.size() )
11607  {
11608  info.resize( infoSize );
11609  }
11610  return createResultValueType( result, info );
11611  }
11612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11613 
11614  //=== VK_KHR_dynamic_rendering ===
11615 
11616  template <typename Dispatch>
11617  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
11618  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11619  {
11620  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11621  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
11622  }
11623 
11624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11625  template <typename Dispatch>
11626  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
11627  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11628  {
11629  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11631  VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
11632 # endif
11633 
11634  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
11635  }
11636 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11637 
11638  template <typename Dispatch>
11639  VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11640  {
11641  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11642  d.vkCmdEndRenderingKHR( m_commandBuffer );
11643  }
11644 
11645 #if defined( VK_USE_PLATFORM_GGP )
11646  //=== VK_GGP_stream_descriptor_surface ===
11647 
11648  template <typename Dispatch>
11650  Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
11651  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11652  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
11653  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11654  {
11655  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11656  return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
11657  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
11658  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11659  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
11660  }
11661 
11662 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11663  template <typename Dispatch>
11664  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
11665  Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
11666  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
11667  Dispatch const & d ) const
11668  {
11669  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11670 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11671  VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" );
11672 # endif
11673 
11674  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
11675  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP(
11676  m_instance,
11677  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
11678  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11679  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
11680  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
11681 
11682  return createResultValueType( result, surface );
11683  }
11684 
11685 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11686  template <typename Dispatch>
11687  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
11688  Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
11689  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
11690  Dispatch const & d ) const
11691  {
11692  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11694  VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" );
11695 # endif
11696 
11697  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
11698  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP(
11699  m_instance,
11700  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
11701  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11702  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
11703  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
11704 
11705  return createResultValueType(
11706  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
11707  }
11708 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11709 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11710 #endif /*VK_USE_PLATFORM_GGP*/
11711 
11712  //=== VK_NV_external_memory_capabilities ===
11713 
11714  template <typename Dispatch>
11716  PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
11722  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
11723  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11724  {
11725  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11726  return static_cast<Result>(
11727  d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
11728  static_cast<VkFormat>( format ),
11729  static_cast<VkImageType>( type ),
11730  static_cast<VkImageTiling>( tiling ),
11731  static_cast<VkImageUsageFlags>( usage ),
11732  static_cast<VkImageCreateFlags>( flags ),
11733  static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
11734  reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
11735  }
11736 
11737 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11738  template <typename Dispatch>
11740  PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
11746  Dispatch const & d ) const
11747  {
11748  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11749 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11750  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
11751  "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" );
11752 # endif
11753 
11754  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
11756  d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
11757  static_cast<VkFormat>( format ),
11758  static_cast<VkImageType>( type ),
11759  static_cast<VkImageTiling>( tiling ),
11760  static_cast<VkImageUsageFlags>( usage ),
11761  static_cast<VkImageCreateFlags>( flags ),
11762  static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
11763  reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
11764  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
11765 
11766  return createResultValueType( result, externalImageFormatProperties );
11767  }
11768 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11769 
11770 #if defined( VK_USE_PLATFORM_WIN32_KHR )
11771  //=== VK_NV_external_memory_win32 ===
11772 
11773  template <typename Dispatch>
11774  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
11776  HANDLE * pHandle,
11777  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11778  {
11779  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11780  return static_cast<Result>(
11781  d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
11782  }
11783 
11784 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11785  template <typename Dispatch>
11786  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
11787  VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
11788  {
11789  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11791  VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" );
11792 # endif
11793 
11794  HANDLE handle;
11796  d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
11797  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
11798 
11799  return createResultValueType( result, handle );
11800  }
11801 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11802 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
11803 
11804  //=== VK_KHR_get_physical_device_properties2 ===
11805 
11806  template <typename Dispatch>
11807  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
11808  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11809  {
11810  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11811  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
11812  }
11813 
11814 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11815  template <typename Dispatch>
11816  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
11817  PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11818  {
11819  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11820 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11821  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR &&
11822  "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11823 # endif
11824 
11825  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
11826  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
11827 
11828  return features;
11829  }
11830 
11831  template <typename X, typename Y, typename... Z, typename Dispatch>
11832  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
11833  PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11834  {
11835  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11836 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11837  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR &&
11838  "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11839 # endif
11840 
11841  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
11842  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
11843  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
11844 
11845  return structureChain;
11846  }
11847 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11848 
11849  template <typename Dispatch>
11850  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
11851  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11852  {
11853  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11854  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
11855  }
11856 
11857 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11858  template <typename Dispatch>
11859  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
11860  PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11861  {
11862  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11863 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11864  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR &&
11865  "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11866 # endif
11867 
11868  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
11869  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
11870 
11871  return properties;
11872  }
11873 
11874  template <typename X, typename Y, typename... Z, typename Dispatch>
11875  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
11876  PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11877  {
11878  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11879 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11880  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR &&
11881  "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11882 # endif
11883 
11884  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
11885  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
11886  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
11887 
11888  return structureChain;
11889  }
11890 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11891 
11892  template <typename Dispatch>
11893  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
11894  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
11895  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11896  {
11897  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11898  d.vkGetPhysicalDeviceFormatProperties2KHR(
11899  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
11900  }
11901 
11902 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11903  template <typename Dispatch>
11904  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
11905  PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11906  {
11907  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11908 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11909  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR &&
11910  "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11911 # endif
11912 
11913  VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
11914  d.vkGetPhysicalDeviceFormatProperties2KHR(
11915  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
11916 
11917  return formatProperties;
11918  }
11919 
11920  template <typename X, typename Y, typename... Z, typename Dispatch>
11921  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
11922  PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11923  {
11924  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11925 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11926  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR &&
11927  "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11928 # endif
11929 
11930  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
11931  VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
11932  d.vkGetPhysicalDeviceFormatProperties2KHR(
11933  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
11934 
11935  return structureChain;
11936  }
11937 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11938 
11939  template <typename Dispatch>
11941  PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
11942  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
11943  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11944  {
11945  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11946  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
11947  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
11948  reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
11949  }
11950 
11951 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11952  template <typename Dispatch>
11954  PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
11955  {
11956  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11957 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11958  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR &&
11959  "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11960 # endif
11961 
11962  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
11964  d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
11965  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
11966  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
11967  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
11968 
11969  return createResultValueType( result, imageFormatProperties );
11970  }
11971 
11972  template <typename X, typename Y, typename... Z, typename Dispatch>
11973  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
11974  PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
11975  {
11976  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11977 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
11978  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR &&
11979  "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
11980 # endif
11981 
11982  StructureChain<X, Y, Z...> structureChain;
11983  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
11985  d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
11986  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
11987  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
11988  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
11989 
11990  return createResultValueType( result, structureChain );
11991  }
11992 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11993 
11994  template <typename Dispatch>
11995  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
11996  VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
11997  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11998  {
11999  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12000  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
12001  m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
12002  }
12003 
12004 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12005  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
12006  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
12007  PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
12008  {
12009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12010 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12011  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
12012  "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
12013 # endif
12014 
12015  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
12016  uint32_t queueFamilyPropertyCount;
12017  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
12018  queueFamilyProperties.resize( queueFamilyPropertyCount );
12019  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
12020  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
12021 
12022  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
12023  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
12024  {
12025  queueFamilyProperties.resize( queueFamilyPropertyCount );
12026  }
12027  return queueFamilyProperties;
12028  }
12029 
12030  template <
12031  typename QueueFamilyProperties2Allocator,
12032  typename Dispatch,
12033  typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type>
12034  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
12035  PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
12036  {
12037  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12038 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12039  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
12040  "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
12041 # endif
12042 
12043  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
12044  uint32_t queueFamilyPropertyCount;
12045  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
12046  queueFamilyProperties.resize( queueFamilyPropertyCount );
12047  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
12048  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
12049 
12050  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
12051  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
12052  {
12053  queueFamilyProperties.resize( queueFamilyPropertyCount );
12054  }
12055  return queueFamilyProperties;
12056  }
12057 
12058  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
12059  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
12060  PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
12061  {
12062  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12063 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12064  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
12065  "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
12066 # endif
12067 
12068  std::vector<StructureChain, StructureChainAllocator> structureChains;
12069  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
12070  uint32_t queueFamilyPropertyCount;
12071  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
12072  structureChains.resize( queueFamilyPropertyCount );
12073  queueFamilyProperties.resize( queueFamilyPropertyCount );
12074  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
12075  {
12076  queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
12077  }
12078  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
12079  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
12080 
12081  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
12082  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
12083  {
12084  structureChains.resize( queueFamilyPropertyCount );
12085  }
12086  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
12087  {
12088  structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
12089  }
12090  return structureChains;
12091  }
12092 
  // Enhanced-mode overload of vkGetPhysicalDeviceQueueFamilyProperties2KHR that returns one
  // StructureChain per queue family, using a caller-supplied allocator for the result vector.
  // Follows the standard two-call idiom: first query the count, then fetch the data.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
                       "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
# endif

    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;
    // First call: query the number of queue families only.
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    // Link each temporary QueueFamilyProperties2 to the pNext chain owned by its StructureChain,
    // so extension structures in the chain are filled by the second call.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    // Second call: fetch the actual properties.
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the second call reported fewer entries than the first.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    // Copy the base structures back into their chains.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
12130 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12131 
12132  template <typename Dispatch>
12133  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
12134  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12135  {
12136  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12137  d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
12138  }
12139 
12140 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12141  template <typename Dispatch>
12142  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
12143  PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12144  {
12145  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12146 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12147  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR &&
12148  "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
12149 # endif
12150 
12151  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
12152  d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
12153 
12154  return memoryProperties;
12155  }
12156 
12157  template <typename X, typename Y, typename... Z, typename Dispatch>
12158  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
12159  PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12160  {
12161  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12162 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12163  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR &&
12164  "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
12165 # endif
12166 
12167  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
12168  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
12169  structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
12170  d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
12171 
12172  return structureChain;
12173  }
12174 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12175 
12176  template <typename Dispatch>
12177  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
12178  uint32_t * pPropertyCount,
12179  VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
12180  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12181  {
12182  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12183  d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
12184  reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
12185  pPropertyCount,
12186  reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
12187  }
12188 
12189 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the sparse-image format properties for the given format
  // info as a vector, using the two-call count/data idiom.
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t propertyCount;
    // First call: count only.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    // Second call: fetch the data.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if fewer entries were returned than initially reported.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
12218 
  // Enhanced-mode overload with a caller-supplied allocator for the result vector; otherwise
  // identical to the allocator-less overload (two-call count/data idiom).
  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                        SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
                       "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;
    // First call: count only.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    // Second call: fetch the data.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
12252 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12253 
12254  //=== VK_KHR_device_group ===
12255 
12256  template <typename Dispatch>
12257  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
12258  uint32_t localDeviceIndex,
12259  uint32_t remoteDeviceIndex,
12260  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
12261  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12262  {
12263  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12264  d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
12265  m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
12266  }
12267 
12268 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the peer-memory feature flags by value.
  // NOTE(review): the extracted view is missing the signature/declaration lines that the
  // generated file contains here — verify against the full header.
  template <typename Dispatch>
    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR &&
                       "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
# endif

    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );

    return peerMemoryFeatures;
  }
12285 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12286 
  // Forwards to vkCmdSetDeviceMaskKHR (VK_KHR_device_group); deviceMask is passed through unchanged.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }
12293 
  // Forwards to vkCmdDispatchBaseKHR (VK_KHR_device_group), passing base workgroup coordinates
  // and group counts straight through.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
                                                         uint32_t baseGroupY,
                                                         uint32_t baseGroupZ,
                                                         uint32_t groupCountX,
                                                         uint32_t groupCountY,
                                                         uint32_t groupCountZ,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
12306 
12307 #if defined( VK_USE_PLATFORM_VI_NN )
12308  //=== VK_NN_vi_surface ===
12309 
12310  template <typename Dispatch>
12311  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
12312  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
12313  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
12314  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12315  {
12316  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12317  return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
12318  reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
12319  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
12320  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
12321  }
12322 
12323 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: creates a VI surface and returns it wrapped in ResultValueType,
  // throwing/raising through resultCheck on failure.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );

    return createResultValueType( result, surface );
  }
12345 
12346 # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode overload: like createViSurfaceNN, but wraps the created surface in a
  // UniqueHandle that destroys it via ObjectDestroy when it goes out of scope.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
12369 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
12370 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12371 #endif /*VK_USE_PLATFORM_VI_NN*/
12372 
12373  //=== VK_KHR_maintenance1 ===
12374 
  // Forwards to vkTrimCommandPoolKHR (VK_KHR_maintenance1).
  // NOTE(review): the extracted view is missing the flags-parameter line — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }
12383 
12384  //=== VK_KHR_device_group_creation ===
12385 
  // Pointer-parameter variant: forwards to vkEnumeratePhysicalDeviceGroupsKHR, following the
  // C API's count/data query protocol.
  // NOTE(review): the extracted view is missing the return-type line — verify against the full header.
  template <typename Dispatch>
    Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount,
                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
      m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }
12396 
12397 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: enumerates all physical-device groups into a vector, retrying
  // while the implementation reports eIncomplete (count changed between the two calls).
  // NOTE(review): the extracted view is missing return-type/declaration lines — verify against the full header.
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
    Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR &&
                       "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t physicalDeviceGroupCount;
    do
    {
      // Query the count, then the data; loop again on eIncomplete.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( result, physicalDeviceGroupProperties );
  }
12430 
  // Enhanced-mode overload with a caller-supplied allocator; otherwise identical to the
  // allocator-less overload (retry loop on eIncomplete).
  // NOTE(review): the extracted view is missing return-type/declaration lines — verify against the full header.
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
              int>::type>
    Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR &&
                       "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    do
    {
      // Query the count, then the data; loop again on eIncomplete.
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( result, physicalDeviceGroupProperties );
  }
12468 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12469 
12470  //=== VK_KHR_external_memory_capabilities ===
12471 
12472  template <typename Dispatch>
12473  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
12474  VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
12475  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12476  {
12477  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12478  d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
12479  reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
12480  reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
12481  }
12482 
12483 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12484  template <typename Dispatch>
12485  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
12486  PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
12487  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12488  {
12489  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12490 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12491  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
12492  "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
12493 # endif
12494 
12495  VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
12496  d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
12497  reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
12498  reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
12499 
12500  return externalBufferProperties;
12501  }
12502 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12503 
12504 #if defined( VK_USE_PLATFORM_WIN32_KHR )
12505  //=== VK_KHR_external_memory_win32 ===
12506 
12507  template <typename Dispatch>
12508  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
12509  HANDLE * pHandle,
12510  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12511  {
12512  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12513  return static_cast<Result>(
12514  d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
12515  }
12516 
12517 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the exported Win32 HANDLE wrapped in ResultValueType.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" );
# endif

    HANDLE handle;
      d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );

    return createResultValueType( result, handle );
  }
12534 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12535 
  // Pointer-parameter variant: forwards to vkGetMemoryWin32HandlePropertiesKHR.
  // NOTE(review): the extracted view is missing the return-type line — verify against the full header.
  template <typename Dispatch>
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               HANDLE handle,
                                               VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                       handle,
                                                                       reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  }
12549 
12550 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the handle properties wrapped in ResultValueType.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function <vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
      d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                             static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                             handle,
                                             reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );

    return createResultValueType( result, memoryWin32HandleProperties );
  }
12570 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12571 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
12572 
12573  //=== VK_KHR_external_memory_fd ===
12574 
12575  template <typename Dispatch>
12576  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
12577  int * pFd,
12578  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12579  {
12580  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12581  return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
12582  }
12583 
12584 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the exported file descriptor wrapped in ResultValueType.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
                                                                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" );
# endif

    int fd;
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );

    return createResultValueType( result, fd );
  }
12601 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12602 
  // Pointer-parameter variant: forwards to vkGetMemoryFdPropertiesKHR.
  // NOTE(review): the extracted view is missing the signature's first line — verify against the full header.
  template <typename Dispatch>
    int fd,
    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  }
12613 
12614 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the fd's memory properties wrapped in ResultValueType.
  // NOTE(review): the extracted view is missing the return-type line — verify against the full header.
  template <typename Dispatch>
    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR(
      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );

    return createResultValueType( result, memoryFdProperties );
  }
12631 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12632 
12633  //=== VK_KHR_external_semaphore_capabilities ===
12634 
12635  template <typename Dispatch>
12636  VULKAN_HPP_INLINE void
12637  PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
12638  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
12639  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12640  {
12641  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12642  d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
12643  reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
12644  reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
12645  }
12646 
12647 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12648  template <typename Dispatch>
12649  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
12650  PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
12651  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12652  {
12653  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12654 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12655  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
12656  "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
12657 # endif
12658 
12659  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
12660  d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
12661  reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
12662  reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
12663 
12664  return externalSemaphoreProperties;
12665  }
12666 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12667 
12668 #if defined( VK_USE_PLATFORM_WIN32_KHR )
12669  //=== VK_KHR_external_semaphore_win32 ===
12670 
12671  template <typename Dispatch>
12672  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
12673  const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12674  {
12675  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12676  return static_cast<Result>(
12677  d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
12678  }
12679 
12680 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: imports a Win32 handle into a semaphore, checking the result.
  // NOTE(review): the extracted view is missing a declaration line here — verify against the full header.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
# endif

      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );

    return createResultValueType( result );
  }
12697 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12698 
12699  template <typename Dispatch>
12700  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
12701  const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12702  {
12703  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12704  return static_cast<Result>(
12705  d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
12706  }
12707 
12708 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12709  template <typename Dispatch>
12710  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
12711  Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
12712  {
12713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12714 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12715  VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
12716 # endif
12717 
12718  HANDLE handle;
12720  d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
12721  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
12722 
12723  return createResultValueType( result, handle );
12724  }
12725 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12726 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
12727 
12728  //=== VK_KHR_external_semaphore_fd ===
12729 
12730  template <typename Dispatch>
12731  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
12732  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12733  {
12734  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12735  return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
12736  }
12737 
12738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12739  template <typename Dispatch>
12741  Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
12742  {
12743  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12744 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12745  VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
12746 # endif
12747 
12749  d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
12750  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
12751 
12752  return createResultValueType( result );
12753  }
12754 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12755 
12756  template <typename Dispatch>
12757  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
12758  int * pFd,
12759  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12760  {
12761  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12762  return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
12763  }
12764 
12765 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12766  template <typename Dispatch>
12768  Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
12769  {
12770  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12771 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12772  VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
12773 # endif
12774 
12775  int fd;
12777  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
12778  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
12779 
12780  return createResultValueType( result, fd );
12781  }
12782 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12783 
12784  //=== VK_KHR_push_descriptor ===
12785 
12786  template <typename Dispatch>
12787  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
12788  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
12789  uint32_t set,
12790  uint32_t descriptorWriteCount,
12791  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
12792  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12793  {
12794  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12795  d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
12796  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
12797  static_cast<VkPipelineLayout>( layout ),
12798  set,
12799  descriptorWriteCount,
12800  reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
12801  }
12802 
12803 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12804  template <typename Dispatch>
12805  VULKAN_HPP_INLINE void
12806  CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
12807  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
12808  uint32_t set,
12809  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
12810  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12811  {
12812  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12813 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12814  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor>" );
12815 # endif
12816 
12817  d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
12818  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
12819  static_cast<VkPipelineLayout>( layout ),
12820  set,
12821  descriptorWrites.size(),
12822  reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
12823  }
12824 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12825 
12826  template <typename Dispatch>
12827  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
12828  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
12829  uint32_t set,
12830  const void * pData,
12831  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12832  {
12833  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12834  d.vkCmdPushDescriptorSetWithTemplateKHR(
12835  m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
12836  }
12837 
12838 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12839  template <typename DataType, typename Dispatch>
12840  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
12841  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
12842  uint32_t set,
12843  DataType const & data,
12844  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12845  {
12846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12847 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12848  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplateKHR &&
12849  "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor>" );
12850 # endif
12851 
12852  d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
12853  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
12854  static_cast<VkPipelineLayout>( layout ),
12855  set,
12856  reinterpret_cast<const void *>( &data ) );
12857  }
12858 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12859 
12860  //=== VK_EXT_conditional_rendering ===
12861 
12862  template <typename Dispatch>
12863  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
12864  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12865  {
12866  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12867  d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
12868  }
12869 
12870 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12871  template <typename Dispatch>
12872  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
12873  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12874  {
12875  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12876 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12877  VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" );
12878 # endif
12879 
12880  d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
12881  }
12882 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12883 
12884  template <typename Dispatch>
12885  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12886  {
12887  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12888  d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
12889  }
12890 
12891  //=== VK_KHR_descriptor_update_template ===
12892 
12893  template <typename Dispatch>
12895  Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
12896  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
12897  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
12898  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12899  {
12900  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12901  return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
12902  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
12903  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
12904  reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
12905  }
12906 
12907 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12908  template <typename Dispatch>
12910  Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
12912  Dispatch const & d ) const
12913  {
12914  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12915 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12916  VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR &&
12917  "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
12918 # endif
12919 
12920  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
12921  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR(
12922  m_device,
12923  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
12924  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12925  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
12926  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
12927 
12928  return createResultValueType( result, descriptorUpdateTemplate );
12929  }
12930 
12931 # ifndef VULKAN_HPP_NO_SMART_HANDLE
12932  template <typename Dispatch>
12934  Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
12936  Dispatch const & d ) const
12937  {
12938  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12939 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12940  VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR &&
12941  "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
12942 # endif
12943 
12944  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
12945  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR(
12946  m_device,
12947  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
12948  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12949  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
12950  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
12951 
12952  return createResultValueType( result,
12954  descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
12955  }
12956 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
12957 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12958 
12959  template <typename Dispatch>
12960  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
12961  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
12962  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12963  {
12964  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12965  d.vkDestroyDescriptorUpdateTemplateKHR(
12966  m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
12967  }
12968 
12969 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12970  template <typename Dispatch>
12971  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
12973  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12974  {
12975  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12976 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
12977  VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR &&
12978  "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
12979 # endif
12980 
12981  d.vkDestroyDescriptorUpdateTemplateKHR(
12982  m_device,
12983  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
12984  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
12985  }
12986 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12987 
12988  template <typename Dispatch>
12989  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
12990  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
12991  const void * pData,
12992  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12993  {
12994  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12995  d.vkUpdateDescriptorSetWithTemplateKHR(
12996  m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
12997  }
12998 
12999 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13000  template <typename DataType, typename Dispatch>
13001  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
13002  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
13003  DataType const & data,
13004  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13005  {
13006  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13007 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13008  VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR &&
13009  "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
13010 # endif
13011 
13012  d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
13013  static_cast<VkDescriptorSet>( descriptorSet ),
13014  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
13015  reinterpret_cast<const void *>( &data ) );
13016  }
13017 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13018 
13019  //=== VK_NV_clip_space_w_scaling ===
13020 
13021  template <typename Dispatch>
13022  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
13023  uint32_t viewportCount,
13024  const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
13025  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13026  {
13027  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13028  d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
13029  }
13030 
13031 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13032  template <typename Dispatch>
13033  VULKAN_HPP_INLINE void
13034  CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
13035  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
13036  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13037  {
13038  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13039 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13040  VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" );
13041 # endif
13042 
13043  d.vkCmdSetViewportWScalingNV(
13044  m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
13045  }
13046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13047 
13048  //=== VK_EXT_direct_mode_display ===
13049 
13050 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
13051  template <typename Dispatch>
13052  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13053  {
13054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13055  return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
13056  }
13057 #else
13058  template <typename Dispatch>
13059  VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13060  {
13061  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13062 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13063  VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function <vkReleaseDisplayEXT> requires <VK_EXT_direct_mode_display>" );
13064 # endif
13065 
13066  d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
13067  }
13068 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
13069 
13070 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
13071  //=== VK_EXT_acquire_xlib_display ===
13072 
13073  template <typename Dispatch>
13074  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy,
13075  VULKAN_HPP_NAMESPACE::DisplayKHR display,
13076  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13077  {
13078  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13079  return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
13080  }
13081 
13082 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13083  template <typename Dispatch>
13084  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
13085  PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
13086  {
13087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13088 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13089  VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" );
13090 # endif
13091 
13093  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
13094  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
13095 
13096  return createResultValueType( result );
13097  }
13098 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13099 
13100  template <typename Dispatch>
13101  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
13102  RROutput rrOutput,
13103  VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
13104  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13105  {
13106  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13107  return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
13108  }
13109 
13110 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13111  template <typename Dispatch>
13112  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
13113  PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
13114  {
13115  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13116 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13117  VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" );
13118 # endif
13119 
13120  VULKAN_HPP_NAMESPACE::DisplayKHR display;
13122  d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
13123  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
13124 
13125  return createResultValueType( result, display );
13126  }
13127 
13128 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13129  template <typename Dispatch>
13130  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
13131  PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
13132  {
13133  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13134 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13135  VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" );
13136 # endif
13137 
13138  VULKAN_HPP_NAMESPACE::DisplayKHR display;
13140  d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
13141  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
13142 
13143  return createResultValueType( result,
13144  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
13145  }
13146 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13147 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13148 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
13149 
13150  //=== VK_EXT_display_surface_counter ===
13151 
13152  template <typename Dispatch>
13154  PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
13155  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
13156  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13157  {
13158  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13159  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
13160  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
13161  }
13162 
13163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13164  template <typename Dispatch>
13166  PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
13167  {
13168  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13169 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13170  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT &&
13171  "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" );
13172 # endif
13173 
13174  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
13175  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
13176  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
13177  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
13178 
13179  return createResultValueType( result, surfaceCapabilities );
13180  }
13181 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13182 
13183  //=== VK_EXT_display_control ===
13184 
13185  template <typename Dispatch>
13186  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
13187  const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
13188  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13189  {
13190  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13191  return static_cast<Result>(
13192  d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
13193  }
13194 
13195 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13196  template <typename Dispatch>
13197  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
13198  const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
13199  Dispatch const & d ) const
13200  {
13201  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13202 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13203  VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" );
13204 # endif
13205 
13207  d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
13208  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
13209 
13210  return createResultValueType( result );
13211  }
13212 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13213 
13214  template <typename Dispatch>
13215  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
13216  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13217  VULKAN_HPP_NAMESPACE::Fence * pFence,
13218  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13219  {
13220  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13221  return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
13222  reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
13223  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13224  reinterpret_cast<VkFence *>( pFence ) ) );
13225  }
13226 
13227 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13228  template <typename Dispatch>
13230  Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
13232  Dispatch const & d ) const
13233  {
13234  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13236  VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" );
13237 # endif
13238 
13239  VULKAN_HPP_NAMESPACE::Fence fence;
13240  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT(
13241  m_device,
13242  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
13243  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13244  reinterpret_cast<VkFence *>( &fence ) ) );
13245  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
13246 
13247  return createResultValueType( result, fence );
13248  }
13249 
13250 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13251  template <typename Dispatch>
13253  Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
13255  Dispatch const & d ) const
13256  {
13257  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13258 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13259  VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" );
13260 # endif
13261 
13262  VULKAN_HPP_NAMESPACE::Fence fence;
13263  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT(
13264  m_device,
13265  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
13266  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13267  reinterpret_cast<VkFence *>( &fence ) ) );
13268  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
13269 
13270  return createResultValueType( result,
13272  }
13273 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13274 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13275 
13276  template <typename Dispatch>
13277  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
13278  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
13279  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13280  VULKAN_HPP_NAMESPACE::Fence * pFence,
13281  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13282  {
13283  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13284  return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
13285  static_cast<VkDisplayKHR>( display ),
13286  reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
13287  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13288  reinterpret_cast<VkFence *>( pFence ) ) );
13289  }
13290 
13291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13292  template <typename Dispatch>
13294  Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
13295  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
13297  Dispatch const & d ) const
13298  {
13299  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13300 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13301  VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" );
13302 # endif
13303 
13304  VULKAN_HPP_NAMESPACE::Fence fence;
13305  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT(
13306  m_device,
13307  static_cast<VkDisplayKHR>( display ),
13308  reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
13309  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13310  reinterpret_cast<VkFence *>( &fence ) ) );
13311  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
13312 
13313  return createResultValueType( result, fence );
13314  }
13315 
13316 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13317  template <typename Dispatch>
13319  Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
13320  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
13322  Dispatch const & d ) const
13323  {
13324  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13325 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13326  VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" );
13327 # endif
13328 
13329  VULKAN_HPP_NAMESPACE::Fence fence;
13330  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT(
13331  m_device,
13332  static_cast<VkDisplayKHR>( display ),
13333  reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
13334  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13335  reinterpret_cast<VkFence *>( &fence ) ) );
13336  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
13337 
13338  return createResultValueType( result,
13340  }
13341 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13343 
13344  template <typename Dispatch>
13345  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13347  uint64_t * pCounterValue,
13348  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13349  {
13350  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13351  return static_cast<Result>(
13352  d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
13353  }
13354 
13355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13356  template <typename Dispatch>
13358  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
13359  {
13360  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13361 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13362  VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" );
13363 # endif
13364 
13365  uint64_t counterValue;
13367  d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
13368  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
13369 
13370  return createResultValueType( result, counterValue );
13371  }
13372 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13373 
13374  //=== VK_GOOGLE_display_timing ===
13375 
13376  template <typename Dispatch>
13378  Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13379  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
13380  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13381  {
13382  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13383  return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
13384  m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
13385  }
13386 
13387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13388  template <typename Dispatch>
13390  Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
13391  {
13392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13393 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13394  VULKAN_HPP_ASSERT( d.vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" );
13395 # endif
13396 
13397  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
13398  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRefreshCycleDurationGOOGLE(
13399  m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
13400  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
13401 
13402  return createResultValueType( result, displayTimingProperties );
13403  }
13404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13405 
13406  template <typename Dispatch>
13408  Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13409  uint32_t * pPresentationTimingCount,
13410  VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
13411  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13412  {
13413  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13414  return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
13415  static_cast<VkSwapchainKHR>( swapchain ),
13416  pPresentationTimingCount,
13417  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
13418  }
13419 
13420 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13421  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
13424  Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
13425  {
13426  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13427 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13428  VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" );
13429 # endif
13430 
13431  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
13432  uint32_t presentationTimingCount;
13434  do
13435  {
13436  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13437  d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
13438  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount )
13439  {
13440  presentationTimings.resize( presentationTimingCount );
13441  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13442  d.vkGetPastPresentationTimingGOOGLE( m_device,
13443  static_cast<VkSwapchainKHR>( swapchain ),
13444  &presentationTimingCount,
13445  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
13446  }
13447  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
13448  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
13449  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
13450  if ( presentationTimingCount < presentationTimings.size() )
13451  {
13452  presentationTimings.resize( presentationTimingCount );
13453  }
13454  return createResultValueType( result, presentationTimings );
13455  }
13456 
13457  template <
13458  typename PastPresentationTimingGOOGLEAllocator,
13459  typename Dispatch,
13460  typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
13461  int>::type>
13464  Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13465  PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
13466  Dispatch const & d ) const
13467  {
13468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13469 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13470  VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" );
13471 # endif
13472 
13473  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
13474  pastPresentationTimingGOOGLEAllocator );
13475  uint32_t presentationTimingCount;
13477  do
13478  {
13479  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13480  d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
13481  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount )
13482  {
13483  presentationTimings.resize( presentationTimingCount );
13484  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13485  d.vkGetPastPresentationTimingGOOGLE( m_device,
13486  static_cast<VkSwapchainKHR>( swapchain ),
13487  &presentationTimingCount,
13488  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
13489  }
13490  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
13491  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
13492  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
13493  if ( presentationTimingCount < presentationTimings.size() )
13494  {
13495  presentationTimings.resize( presentationTimingCount );
13496  }
13497  return createResultValueType( result, presentationTimings );
13498  }
13499 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13500 
13501  //=== VK_EXT_discard_rectangles ===
13502 
13503  template <typename Dispatch>
13504  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
13505  uint32_t discardRectangleCount,
13506  const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
13507  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13508  {
13509  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13510  d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
13511  }
13512 
13513 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13514  template <typename Dispatch>
13515  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
13516  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
13517  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13518  {
13519  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13520 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13521  VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" );
13522 # endif
13523 
13524  d.vkCmdSetDiscardRectangleEXT(
13525  m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
13526  }
13527 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13528 
13529  template <typename Dispatch>
13530  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,
13531  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13532  {
13533  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13534  d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
13535  }
13536 
13537  template <typename Dispatch>
13538  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,
13539  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13540  {
13541  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13542  d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
13543  }
13544 
13545  //=== VK_EXT_hdr_metadata ===
13546 
13547  template <typename Dispatch>
13548  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
13549  const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
13550  const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
13551  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13552  {
13553  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13554  d.vkSetHdrMetadataEXT(
13555  m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
13556  }
13557 
13558 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13559  template <typename Dispatch>
13560  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
13561  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
13562  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
13563  {
13564  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13565 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13566  VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" );
13567 # endif
13568 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13569  VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
13570 # else
13571  if ( swapchains.size() != metadata.size() )
13572  {
13573  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
13574  }
13575 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
13576 
13577  d.vkSetHdrMetadataEXT( m_device,
13578  swapchains.size(),
13579  reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
13580  reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
13581  }
13582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13583 
13584  //=== VK_KHR_create_renderpass2 ===
13585 
13586  template <typename Dispatch>
13587  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
13588  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13589  VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
13590  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13591  {
13592  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13593  return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
13594  reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
13595  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13596  reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
13597  }
13598 
13599 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13600  template <typename Dispatch>
13602  Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
13604  Dispatch const & d ) const
13605  {
13606  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13607 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13608  VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
13609 # endif
13610 
13611  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
13613  d.vkCreateRenderPass2KHR( m_device,
13614  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
13615  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13616  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
13617  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
13618 
13619  return createResultValueType( result, renderPass );
13620  }
13621 
13622 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13623  template <typename Dispatch>
13625  Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
13627  Dispatch const & d ) const
13628  {
13629  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13630 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13631  VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
13632 # endif
13633 
13634  VULKAN_HPP_NAMESPACE::RenderPass renderPass;
13636  d.vkCreateRenderPass2KHR( m_device,
13637  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
13638  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13639  reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
13640  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
13641 
13642  return createResultValueType(
13643  result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
13644  }
13645 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13646 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13647 
13648  template <typename Dispatch>
13649  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
13650  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
13651  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13652  {
13653  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13654  d.vkCmdBeginRenderPass2KHR(
13655  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
13656  }
13657 
13658 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13659  template <typename Dispatch>
13660  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
13661  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
13662  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13663  {
13664  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13665 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13666  VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
13667 # endif
13668 
13669  d.vkCmdBeginRenderPass2KHR(
13670  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
13671  }
13672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13673 
13674  template <typename Dispatch>
13675  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
13676  const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
13677  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13678  {
13679  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13680  d.vkCmdNextSubpass2KHR(
13681  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
13682  }
13683 
13684 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13685  template <typename Dispatch>
13686  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
13687  const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
13688  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13689  {
13690  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13691 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13692  VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
13693 # endif
13694 
13695  d.vkCmdNextSubpass2KHR(
13696  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
13697  }
13698 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13699 
13700  template <typename Dispatch>
13701  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
13702  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13703  {
13704  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13705  d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
13706  }
13707 
13708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13709  template <typename Dispatch>
13710  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
13711  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13712  {
13713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13714 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13715  VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
13716 # endif
13717 
13718  d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
13719  }
13720 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13721 
13722  //=== VK_KHR_shared_presentable_image ===
13723 
13724 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
13725  template <typename Dispatch>
13726  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13727  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13728  {
13729  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13730  return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
13731  }
13732 #else
13733  template <typename Dispatch>
13734  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
13735  Dispatch const & d ) const
13736  {
13737  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13738 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13739  VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" );
13740 # endif
13741 
13743  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
13744  resultCheck( result,
13745  VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
13746  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
13747 
13748  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
13749  }
13750 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
13751 
13752  //=== VK_KHR_external_fence_capabilities ===
13753 
13754  template <typename Dispatch>
13755  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
13756  VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
13757  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13758  {
13759  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13760  d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
13761  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
13762  reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
13763  }
13764 
13765 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13766  template <typename Dispatch>
13767  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
13768  PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
13769  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13770  {
13771  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13772 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13773  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR &&
13774  "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
13775 # endif
13776 
13777  VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
13778  d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
13779  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
13780  reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
13781 
13782  return externalFenceProperties;
13783  }
13784 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13785 
13786 #if defined( VK_USE_PLATFORM_WIN32_KHR )
13787  //=== VK_KHR_external_fence_win32 ===
13788 
13789  template <typename Dispatch>
13790  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
13791  const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13792  {
13793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13794  return static_cast<Result>(
13795  d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
13796  }
13797 
13798 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13799  template <typename Dispatch>
13800  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
13801  Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
13802  {
13803  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13804 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13805  VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
13806 # endif
13807 
13809  d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
13810  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
13811 
13812  return createResultValueType( result );
13813  }
13814 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13815 
13816  template <typename Dispatch>
13817  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
13818  HANDLE * pHandle,
13819  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13820  {
13821  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13822  return static_cast<Result>(
13823  d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
13824  }
13825 
13826 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13827  template <typename Dispatch>
13828  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
13829  Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
13830  {
13831  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13832 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13833  VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
13834 # endif
13835 
13836  HANDLE handle;
13838  d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
13839  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
13840 
13841  return createResultValueType( result, handle );
13842  }
13843 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13844 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
13845 
13846  //=== VK_KHR_external_fence_fd ===
13847 
13848  template <typename Dispatch>
13849  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
13850  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13851  {
13852  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13853  return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
13854  }
13855 
13856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13857  template <typename Dispatch>
13859  Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
13860  {
13861  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13862 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13863  VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
13864 # endif
13865 
13867  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
13868  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
13869 
13870  return createResultValueType( result );
13871  }
13872 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13873 
13874  template <typename Dispatch>
13875  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
13876  int * pFd,
13877  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13878  {
13879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13880  return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
13881  }
13882 
13883 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13884  template <typename Dispatch>
13885  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
13886  Dispatch const & d ) const
13887  {
13888  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13889 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13890  VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
13891 # endif
13892 
13893  int fd;
13895  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
13896  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
13897 
13898  return createResultValueType( result, fd );
13899  }
13900 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13901 
13902  //=== VK_KHR_performance_query ===
13903 
13904  template <typename Dispatch>
13906  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
13907  uint32_t * pCounterCount,
13908  VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
13909  VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
13910  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13911  {
13912  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13913  return static_cast<Result>(
13914  d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
13915  queueFamilyIndex,
13916  pCounterCount,
13917  reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
13918  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
13919  }
13920 
13921 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13922  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
13925  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
13926  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
13927  {
13928  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13929 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13930  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
13931  "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
13932 # endif
13933 
13934  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
13935  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
13936  data_;
13937  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
13938  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
13939  uint32_t counterCount;
13941  do
13942  {
13943  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13944  d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
13945  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
13946  {
13947  counters.resize( counterCount );
13948  counterDescriptions.resize( counterCount );
13949  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
13950  m_physicalDevice,
13951  queueFamilyIndex,
13952  &counterCount,
13953  reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
13954  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
13955  }
13956  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
13957  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
13958  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
13959  if ( counterCount < counters.size() )
13960  {
13961  counters.resize( counterCount );
13962  counterDescriptions.resize( counterCount );
13963  }
13964  return createResultValueType( result, data_ );
13965  }
13966 
13967  template <typename PerformanceCounterKHRAllocator,
13968  typename PerformanceCounterDescriptionKHRAllocator,
13969  typename Dispatch,
13970  typename std::enable_if<
13971  std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value &&
13972  std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
13973  int>::type>
13976  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
13977  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
13978  PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
13979  PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
13980  Dispatch const & d ) const
13981  {
13982  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13983 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
13984  VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
13985  "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
13986 # endif
13987 
13988  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
13989  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
13990  data_(
13991  std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
13992  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
13993  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
13994  uint32_t counterCount;
13996  do
13997  {
13998  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
13999  d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
14000  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
14001  {
14002  counters.resize( counterCount );
14003  counterDescriptions.resize( counterCount );
14004  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
14005  m_physicalDevice,
14006  queueFamilyIndex,
14007  &counterCount,
14008  reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
14009  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
14010  }
14011  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14012  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
14013  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
14014  if ( counterCount < counters.size() )
14015  {
14016  counters.resize( counterCount );
14017  counterDescriptions.resize( counterCount );
14018  }
14019  return createResultValueType( result, data_ );
14020  }
14021 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14022 
14023  template <typename Dispatch>
14024  VULKAN_HPP_INLINE void
14025  PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
14026  uint32_t * pNumPasses,
14027  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14028  {
14029  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14030  d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
14031  m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
14032  }
14033 
14034 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14035  template <typename Dispatch>
14036  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
14037  const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14038  {
14039  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14040 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14041  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
14042  "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" );
14043 # endif
14044 
14045  uint32_t numPasses;
14046  d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
14047  m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
14048 
14049  return numPasses;
14050  }
14051 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14052 
14053  template <typename Dispatch>
14054  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
14055  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14056  {
14057  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14058  return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
14059  }
14060 
14061 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14062  template <typename Dispatch>
14064  Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
14065  {
14066  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14068  VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" );
14069 # endif
14070 
14072  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
14073  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
14074 
14075  return createResultValueType( result );
14076  }
14077 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14078 
14079  template <typename Dispatch>
14080  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14081  {
14082  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14083  d.vkReleaseProfilingLockKHR( m_device );
14084  }
14085 
14086  //=== VK_KHR_get_surface_capabilities2 ===
14087 
14088  template <typename Dispatch>
14090  PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
14091  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
14092  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14093  {
14094  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14095  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
14096  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
14097  reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
14098  }
14099 
14100 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14101  template <typename Dispatch>
14103  PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
14104  {
14105  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14106 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14107  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
14108  "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14109 # endif
14110 
14111  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
14113  d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
14114  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14115  reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
14116  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
14117 
14118  return createResultValueType( result, surfaceCapabilities );
14119  }
14120 
14121  template <typename X, typename Y, typename... Z, typename Dispatch>
14122  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
14123  PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
14124  {
14125  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14126 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14127  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
14128  "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14129 # endif
14130 
14131  StructureChain<X, Y, Z...> structureChain;
14132  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
14134  d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
14135  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14136  reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
14137  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
14138 
14139  return createResultValueType( result, structureChain );
14140  }
14141 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14142 
14143  template <typename Dispatch>
14144  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
14145  uint32_t * pSurfaceFormatCount,
14146  VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
14147  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14148  {
14149  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14150  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
14151  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
14152  pSurfaceFormatCount,
14153  reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
14154  }
14155 
14156 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14157  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
14159  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
14160  {
14161  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14162 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14163  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
14164  "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14165 # endif
14166 
14167  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
14168  uint32_t surfaceFormatCount;
14170  do
14171  {
14172  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
14173  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
14174  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
14175  {
14176  surfaceFormats.resize( surfaceFormatCount );
14177  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14178  d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
14179  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14180  &surfaceFormatCount,
14181  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
14182  }
14183  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14184  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
14185  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
14186  if ( surfaceFormatCount < surfaceFormats.size() )
14187  {
14188  surfaceFormats.resize( surfaceFormatCount );
14189  }
14190  return createResultValueType( result, surfaceFormats );
14191  }
14192 
14193  template <typename SurfaceFormat2KHRAllocator,
14194  typename Dispatch,
14195  typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type>
14197  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
14198  SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
14199  Dispatch const & d ) const
14200  {
14201  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14202 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14203  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
14204  "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14205 # endif
14206 
14207  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
14208  uint32_t surfaceFormatCount;
14210  do
14211  {
14212  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
14213  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
14214  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
14215  {
14216  surfaceFormats.resize( surfaceFormatCount );
14217  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14218  d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
14219  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14220  &surfaceFormatCount,
14221  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
14222  }
14223  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14224  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
14225  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
14226  if ( surfaceFormatCount < surfaceFormats.size() )
14227  {
14228  surfaceFormats.resize( surfaceFormatCount );
14229  }
14230  return createResultValueType( result, surfaceFormats );
14231  }
14232 
14233  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
14235  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
14236  {
14237  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14238 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14239  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
14240  "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14241 # endif
14242 
14243  std::vector<StructureChain, StructureChainAllocator> structureChains;
14244  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
14245  uint32_t surfaceFormatCount;
14247  do
14248  {
14249  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
14250  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
14251  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
14252  {
14253  structureChains.resize( surfaceFormatCount );
14254  surfaceFormats.resize( surfaceFormatCount );
14255  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
14256  {
14257  surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
14258  }
14259  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14260  d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
14261  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14262  &surfaceFormatCount,
14263  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
14264  }
14265  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14266  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
14267  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
14268  if ( surfaceFormatCount < surfaceFormats.size() )
14269  {
14270  structureChains.resize( surfaceFormatCount );
14271  }
14272  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
14273  {
14274  structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
14275  }
14276  return createResultValueType( result, structureChains );
14277  }
14278 
14279  template <typename StructureChain,
14280  typename StructureChainAllocator,
14281  typename Dispatch,
14282  typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
14284  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
14285  StructureChainAllocator & structureChainAllocator,
14286  Dispatch const & d ) const
14287  {
14288  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14289 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14290  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR &&
14291  "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
14292 # endif
14293 
14294  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
14295  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
14296  uint32_t surfaceFormatCount;
14298  do
14299  {
14300  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
14301  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
14302  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
14303  {
14304  structureChains.resize( surfaceFormatCount );
14305  surfaceFormats.resize( surfaceFormatCount );
14306  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
14307  {
14308  surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
14309  }
14310  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14311  d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
14312  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
14313  &surfaceFormatCount,
14314  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
14315  }
14316  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14317  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
14318  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
14319  if ( surfaceFormatCount < surfaceFormats.size() )
14320  {
14321  structureChains.resize( surfaceFormatCount );
14322  }
14323  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
14324  {
14325  structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
14326  }
14327  return createResultValueType( result, structureChains );
14328  }
14329 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14330 
14331  //=== VK_KHR_get_display_properties2 ===
14332 
14333  template <typename Dispatch>
14334  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
14335  VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
14336  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14337  {
14338  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14339  return static_cast<Result>(
14340  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
14341  }
14342 
14343 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14344  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
14347  PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
14348  {
14349  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14350 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14351  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
14352  "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14353 # endif
14354 
14355  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
14356  uint32_t propertyCount;
14358  do
14359  {
14360  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
14361  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14362  {
14363  properties.resize( propertyCount );
14364  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14365  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
14366  }
14367  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14368  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
14369  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14370  if ( propertyCount < properties.size() )
14371  {
14372  properties.resize( propertyCount );
14373  }
14374  return createResultValueType( result, properties );
14375  }
14376 
14377  template <
14378  typename DisplayProperties2KHRAllocator,
14379  typename Dispatch,
14380  typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type>
14383  PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
14384  {
14385  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14386 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14387  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR &&
14388  "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14389 # endif
14390 
14391  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
14392  uint32_t propertyCount;
14394  do
14395  {
14396  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
14397  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14398  {
14399  properties.resize( propertyCount );
14400  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14401  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
14402  }
14403  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14404  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
14405  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14406  if ( propertyCount < properties.size() )
14407  {
14408  properties.resize( propertyCount );
14409  }
14410  return createResultValueType( result, properties );
14411  }
14412 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14413 
14414  template <typename Dispatch>
14415  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
14416  VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
14417  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14418  {
14419  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14420  return static_cast<Result>(
14421  d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
14422  }
14423 
14424 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14425  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
14428  PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
14429  {
14430  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14431 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14432  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
14433  "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14434 # endif
14435 
14436  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
14437  uint32_t propertyCount;
14439  do
14440  {
14441  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
14442  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14443  {
14444  properties.resize( propertyCount );
14445  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
14446  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
14447  }
14448  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14449  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
14450  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14451  if ( propertyCount < properties.size() )
14452  {
14453  properties.resize( propertyCount );
14454  }
14455  return createResultValueType( result, properties );
14456  }
14457 
14458  template <
14459  typename DisplayPlaneProperties2KHRAllocator,
14460  typename Dispatch,
14461  typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
14462  int>::type>
14465  PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
14466  {
14467  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14468 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14469  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
14470  "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14471 # endif
14472 
14473  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
14474  uint32_t propertyCount;
14476  do
14477  {
14478  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
14479  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14480  {
14481  properties.resize( propertyCount );
14482  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
14483  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
14484  }
14485  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14486  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
14487  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14488  if ( propertyCount < properties.size() )
14489  {
14490  properties.resize( propertyCount );
14491  }
14492  return createResultValueType( result, properties );
14493  }
14494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14495 
14496  template <typename Dispatch>
14497  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
14498  uint32_t * pPropertyCount,
14499  VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
14500  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14501  {
14502  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14503  return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
14504  m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
14505  }
14506 
14507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14508  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
14511  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
14512  {
14513  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14514 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14515  VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14516 # endif
14517 
14518  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
14519  uint32_t propertyCount;
14521  do
14522  {
14523  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14524  d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
14525  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14526  {
14527  properties.resize( propertyCount );
14528  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR(
14529  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
14530  }
14531  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14532  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
14533  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14534  if ( propertyCount < properties.size() )
14535  {
14536  properties.resize( propertyCount );
14537  }
14538  return createResultValueType( result, properties );
14539  }
14540 
14541  template <
14542  typename DisplayModeProperties2KHRAllocator,
14543  typename Dispatch,
14544  typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
14545  int>::type>
14548  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
14549  DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
14550  Dispatch const & d ) const
14551  {
14552  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14553 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14554  VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
14555 # endif
14556 
14557  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
14558  uint32_t propertyCount;
14560  do
14561  {
14562  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
14563  d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
14564  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
14565  {
14566  properties.resize( propertyCount );
14567  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR(
14568  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
14569  }
14570  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
14571  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
14572  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
14573  if ( propertyCount < properties.size() )
14574  {
14575  properties.resize( propertyCount );
14576  }
14577  return createResultValueType( result, properties );
14578  }
14579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14580 
14581  template <typename Dispatch>
14583  PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
14584  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
14585  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14586  {
14587  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14588  return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
14589  reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
14590  reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
14591  }
14592 
14593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14594  template <typename Dispatch>
14596  PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
14597  {
14598  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14599 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14600  VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" );
14601 # endif
14602 
14603  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
14605  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
14606  reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
14607  reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
14608  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
14609 
14610  return createResultValueType( result, capabilities );
14611  }
14612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14613 
14614 #if defined( VK_USE_PLATFORM_IOS_MVK )
14615  //=== VK_MVK_ios_surface ===
14616 
14617  template <typename Dispatch>
14618  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
14619  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14620  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
14621  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14622  {
14623  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14624  return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
14625  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
14626  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14627  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
14628  }
14629 
14630 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14631  template <typename Dispatch>
14632  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
14633  Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
14634  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
14635  Dispatch const & d ) const
14636  {
14637  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14638 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14639  VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" );
14640 # endif
14641 
14642  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
14644  d.vkCreateIOSSurfaceMVK( m_instance,
14645  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
14646  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14647  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
14648  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
14649 
14650  return createResultValueType( result, surface );
14651  }
14652 
14653 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14654  template <typename Dispatch>
14655  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
14656  Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
14657  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
14658  Dispatch const & d ) const
14659  {
14660  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14661 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14662  VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" );
14663 # endif
14664 
14665  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
14667  d.vkCreateIOSSurfaceMVK( m_instance,
14668  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
14669  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14670  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
14671  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
14672 
14673  return createResultValueType(
14674  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
14675  }
14676 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14677 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14678 #endif /*VK_USE_PLATFORM_IOS_MVK*/
14679 
14680 #if defined( VK_USE_PLATFORM_MACOS_MVK )
14681  //=== VK_MVK_macos_surface ===
14682 
14683  template <typename Dispatch>
14684  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
14685  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14686  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
14687  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14688  {
14689  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14690  return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
14691  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
14692  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14693  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
14694  }
14695 
14696 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14697  template <typename Dispatch>
14698  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
14699  Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
14700  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
14701  Dispatch const & d ) const
14702  {
14703  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14704 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14705  VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" );
14706 # endif
14707 
14708  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
14710  d.vkCreateMacOSSurfaceMVK( m_instance,
14711  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
14712  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14713  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
14714  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
14715 
14716  return createResultValueType( result, surface );
14717  }
14718 
14719 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14720  template <typename Dispatch>
14721  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
14722  Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
14723  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
14724  Dispatch const & d ) const
14725  {
14726  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14727 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14728  VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" );
14729 # endif
14730 
14731  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
14733  d.vkCreateMacOSSurfaceMVK( m_instance,
14734  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
14735  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14736  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
14737  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
14738 
14739  return createResultValueType(
14740  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
14741  }
14742 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14743 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14744 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
14745 
14746  //=== VK_EXT_debug_utils ===
14747 
14748  template <typename Dispatch>
14749  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
14750  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14751  {
14752  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14753  return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
14754  }
14755 
14756 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14757  template <typename Dispatch>
14759  Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
14760  {
14761  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14762 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14763  VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" );
14764 # endif
14765 
14767  d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
14768  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
14769 
14770  return createResultValueType( result );
14771  }
14772 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14773 
14774  template <typename Dispatch>
14775  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
14776  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14777  {
14778  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14779  return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
14780  }
14781 
14782 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14783  template <typename Dispatch>
14785  Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
14786  {
14787  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14788 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14789  VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" );
14790 # endif
14791 
14793  d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
14794  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
14795 
14796  return createResultValueType( result );
14797  }
14798 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14799 
14800  template <typename Dispatch>
14801  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
14802  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14803  {
14804  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14805  d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
14806  }
14807 
14808 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14809  template <typename Dispatch>
14810  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
14811  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14812  {
14813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14814 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14815  VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
14816 # endif
14817 
14818  d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
14819  }
14820 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14821 
14822  template <typename Dispatch>
14823  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14824  {
14825  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14826  d.vkQueueEndDebugUtilsLabelEXT( m_queue );
14827  }
14828 
14829  template <typename Dispatch>
14830  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
14831  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14832  {
14833  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14834  d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
14835  }
14836 
14837 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14838  template <typename Dispatch>
14839  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
14840  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14841  {
14842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14843 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14844  VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
14845 # endif
14846 
14847  d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
14848  }
14849 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14850 
14851  template <typename Dispatch>
14852  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
14853  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14854  {
14855  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14856  d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
14857  }
14858 
14859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14860  template <typename Dispatch>
14861  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
14862  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14863  {
14864  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14865 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14866  VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
14867 # endif
14868 
14869  d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
14870  }
14871 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14872 
14873  template <typename Dispatch>
14874  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14875  {
14876  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14877  d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
14878  }
14879 
14880  template <typename Dispatch>
14881  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
14882  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14883  {
14884  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14885  d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
14886  }
14887 
14888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14889  template <typename Dispatch>
14890  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
14891  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14892  {
14893  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14894 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14895  VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
14896 # endif
14897 
14898  d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
14899  }
14900 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14901 
14902  template <typename Dispatch>
14904  Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
14905  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14906  VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
14907  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14908  {
14909  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14910  return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
14911  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
14912  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14913  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
14914  }
14915 
14916 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14917  template <typename Dispatch>
14919  Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
14921  Dispatch const & d ) const
14922  {
14923  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14924 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14925  VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" );
14926 # endif
14927 
14928  VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
14929  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT(
14930  m_instance,
14931  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
14932  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14933  reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
14934  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
14935 
14936  return createResultValueType( result, messenger );
14937  }
14938 
14939 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14940  template <typename Dispatch>
14942  Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
14944  Dispatch const & d ) const
14945  {
14946  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14947 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14948  VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" );
14949 # endif
14950 
14951  VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
14952  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT(
14953  m_instance,
14954  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
14955  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14956  reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
14957  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
14958 
14959  return createResultValueType(
14961  }
14962 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14963 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14964 
14965  template <typename Dispatch>
14966  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
14967  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14968  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14969  {
14970  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14971  d.vkDestroyDebugUtilsMessengerEXT(
14972  m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14973  }
14974 
14975 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14976  template <typename Dispatch>
14977  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
14979  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14980  {
14981  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14982 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
14983  VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" );
14984 # endif
14985 
14986  d.vkDestroyDebugUtilsMessengerEXT(
14987  m_instance,
14988  static_cast<VkDebugUtilsMessengerEXT>( messenger ),
14989  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14990  }
14991 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14992 
14993  template <typename Dispatch>
14994  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
14995  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14996  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14997  {
14998  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14999  d.vkDestroyDebugUtilsMessengerEXT(
15000  m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
15001  }
15002 
15003 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15004  template <typename Dispatch>
15005  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
15007  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15008  {
15009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15010 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15011  VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" );
15012 # endif
15013 
15014  d.vkDestroyDebugUtilsMessengerEXT(
15015  m_instance,
15016  static_cast<VkDebugUtilsMessengerEXT>( messenger ),
15017  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
15018  }
15019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15020 
15021  template <typename Dispatch>
15022  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
15024  const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
15025  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15026  {
15027  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15028  d.vkSubmitDebugUtilsMessageEXT( m_instance,
15029  static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
15030  static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
15031  reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
15032  }
15033 
15034 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15035  template <typename Dispatch>
15036  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
15038  const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
15039  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15040  {
15041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15042 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15043  VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" );
15044 # endif
15045 
15046  d.vkSubmitDebugUtilsMessageEXT( m_instance,
15047  static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
15048  static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
15049  reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
15050  }
15051 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15052 
15053 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
15054  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
15055 
15056  template <typename Dispatch>
15058  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer,
15059  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
15060  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15061  {
15062  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15063  return static_cast<Result>(
15064  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
15065  }
15066 
15067 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15068  template <typename Dispatch>
15069  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
15070  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
15071  {
15072  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15073 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15074  VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID &&
15075  "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
15076 # endif
15077 
15078  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
15080  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
15081  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
15082 
15083  return createResultValueType( result, properties );
15084  }
15085 
15086  template <typename X, typename Y, typename... Z, typename Dispatch>
15087  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
15088  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
15089  {
15090  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15091 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15092  VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID &&
15093  "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
15094 # endif
15095 
15096  StructureChain<X, Y, Z...> structureChain;
15097  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
15098  structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
15100  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
15101  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
15102 
15103  return createResultValueType( result, structureChain );
15104  }
15105 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15106 
15107  template <typename Dispatch>
15109  Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
15110  struct AHardwareBuffer ** pBuffer,
15111  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15112  {
15113  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15114  return static_cast<Result>(
15115  d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
15116  }
15117 
15118 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15119  template <typename Dispatch>
15120  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
15121  Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
15122  {
15123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15124 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15125  VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID &&
15126  "Function <vkGetMemoryAndroidHardwareBufferANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
15127 # endif
15128 
15129  struct AHardwareBuffer * buffer;
15131  d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
15132  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
15133 
15134  return createResultValueType( result, buffer );
15135  }
15136 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15137 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
15138 
15139 #if defined( VK_ENABLE_BETA_EXTENSIONS )
15140  //=== VK_AMDX_shader_enqueue ===
15141 
15142  template <typename Dispatch>
15144  Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15145  uint32_t createInfoCount,
15146  const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
15147  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
15148  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
15149  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15150  {
15151  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15152  return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
15153  static_cast<VkPipelineCache>( pipelineCache ),
15154  createInfoCount,
15155  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
15156  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
15157  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
15158  }
15159 
15160 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15161  template <typename PipelineAllocator, typename Dispatch>
15163  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15164  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
15166  Dispatch const & d ) const
15167  {
15168  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15169 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15170  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15171 # endif
15172 
15173  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
15174  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15175  m_device,
15176  static_cast<VkPipelineCache>( pipelineCache ),
15177  createInfos.size(),
15178  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
15179  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15180  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
15181  resultCheck( result,
15182  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
15183  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15184 
15186  }
15187 
15188  template <typename PipelineAllocator,
15189  typename Dispatch,
15190  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
15192  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15193  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
15195  PipelineAllocator & pipelineAllocator,
15196  Dispatch const & d ) const
15197  {
15198  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15199 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15200  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15201 # endif
15202 
15203  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
15204  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15205  m_device,
15206  static_cast<VkPipelineCache>( pipelineCache ),
15207  createInfos.size(),
15208  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
15209  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15210  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
15211  resultCheck( result,
15212  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
15213  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15214 
15216  }
15217 
15218  template <typename Dispatch>
15220  Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15221  const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
15223  Dispatch const & d ) const
15224  {
15225  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15226 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15227  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15228 # endif
15229 
15230  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
15231  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15232  m_device,
15233  static_cast<VkPipelineCache>( pipelineCache ),
15234  1,
15235  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
15236  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15237  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
15238  resultCheck( result,
15239  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX",
15240  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15241 
15242  return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
15243  }
15244 
15245 # ifndef VULKAN_HPP_NO_SMART_HANDLE
15246  template <typename Dispatch, typename PipelineAllocator>
15248  Device::createExecutionGraphPipelinesAMDXUnique(
15249  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15250  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
15252  Dispatch const & d ) const
15253  {
15254  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15255 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15256  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15257 # endif
15258 
15259  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
15260  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15261  m_device,
15262  static_cast<VkPipelineCache>( pipelineCache ),
15263  createInfos.size(),
15264  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
15265  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15266  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
15267  resultCheck( result,
15268  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
15269  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15270  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
15271  uniquePipelines.reserve( createInfos.size() );
15272  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
15273  for ( auto const & pipeline : pipelines )
15274  {
15275  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
15276  }
15278  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
15279  }
15280 
15281  template <
15282  typename Dispatch,
15283  typename PipelineAllocator,
15284  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
15286  Device::createExecutionGraphPipelinesAMDXUnique(
15287  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15288  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
15290  PipelineAllocator & pipelineAllocator,
15291  Dispatch const & d ) const
15292  {
15293  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15294 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15295  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15296 # endif
15297 
15298  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
15299  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15300  m_device,
15301  static_cast<VkPipelineCache>( pipelineCache ),
15302  createInfos.size(),
15303  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
15304  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15305  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
15306  resultCheck( result,
15307  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
15308  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15309  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
15310  uniquePipelines.reserve( createInfos.size() );
15311  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
15312  for ( auto const & pipeline : pipelines )
15313  {
15314  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
15315  }
15317  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
15318  }
15319 
15320  template <typename Dispatch>
15322  Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
15323  const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
15325  Dispatch const & d ) const
15326  {
15327  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15328 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15329  VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" );
15330 # endif
15331 
15332  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
15333  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
15334  m_device,
15335  static_cast<VkPipelineCache>( pipelineCache ),
15336  1,
15337  reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
15338  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15339  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
15340  resultCheck( result,
15341  VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique",
15342  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
15343 
15345  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15347  }
15348 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
15349 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15350 
15351  template <typename Dispatch>
15353  Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
15354  VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
15355  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15356  {
15357  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15358  return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
15359  m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
15360  }
15361 
15362 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15363  template <typename Dispatch>
15365  Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const
15366  {
15367  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15368 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15369  VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX &&
15370  "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" );
15371 # endif
15372 
15373  VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
15374  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
15375  m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) );
15376  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" );
15377 
15378  return createResultValueType( result, sizeInfo );
15379  }
15380 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15381 
15382  template <typename Dispatch>
15384  Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
15385  const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
15386  uint32_t * pNodeIndex,
15387  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15388  {
15389  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15390  return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
15391  m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
15392  }
15393 
15394 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15395  template <typename Dispatch>
15396  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX(
15397  VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const
15398  {
15399  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15400 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15401  VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" );
15402 # endif
15403 
15404  uint32_t nodeIndex;
15405  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
15406  m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) );
15407  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" );
15408 
15409  return createResultValueType( result, nodeIndex );
15410  }
15411 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15412 
15413  template <typename Dispatch>
15414  VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15415  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15416  {
15417  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15418  d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
15419  }
15420 
15421  template <typename Dispatch>
15422  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15423  const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
15424  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15425  {
15426  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15427  d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
15428  }
15429 
15430 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15431  template <typename Dispatch>
15432  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15433  const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
15434  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15435  {
15436  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15437 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15438  VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
15439 # endif
15440 
15441  d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
15442  }
15443 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15444 
15445  template <typename Dispatch>
15446  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15447  const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
15448  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15449  {
15450  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15451  d.vkCmdDispatchGraphIndirectAMDX(
15452  m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
15453  }
15454 
15455 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15456  template <typename Dispatch>
15457  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15458  const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
15459  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15460  {
15461  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15462 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15463  VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
15464 # endif
15465 
15466  d.vkCmdDispatchGraphIndirectAMDX(
15467  m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
15468  }
15469 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15470 
15471  template <typename Dispatch>
15472  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
15474  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15475  {
15476  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15477  d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
15478  }
15479 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
15480 
15481  //=== VK_EXT_sample_locations ===
15482 
15483  template <typename Dispatch>
15484  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
15485  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15486  {
15487  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15488  d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
15489  }
15490 
15491 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15492  template <typename Dispatch>
15493  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
15494  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15495  {
15496  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15497 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15498  VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" );
15499 # endif
15500 
15501  d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
15502  }
15503 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15504 
15505  template <typename Dispatch>
15506  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
15507  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
15508  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15509  {
15510  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15511  d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
15512  m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
15513  }
15514 
15515 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15516  template <typename Dispatch>
15517  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
15518  PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15519  {
15520  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15521 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15522  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT &&
15523  "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" );
15524 # endif
15525 
15526  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
15527  d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
15528  m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
15529 
15530  return multisampleProperties;
15531  }
15532 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15533 
15534  //=== VK_KHR_get_memory_requirements2 ===
15535 
15536  template <typename Dispatch>
15537  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
15538  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
15539  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15540  {
15541  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15542  d.vkGetImageMemoryRequirements2KHR(
15543  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
15544  }
15545 
15546 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15547  template <typename Dispatch>
15548  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
15549  Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15550  {
15551  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15552 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15553  VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR &&
15554  "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15555 # endif
15556 
15557  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
15558  d.vkGetImageMemoryRequirements2KHR(
15559  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
15560 
15561  return memoryRequirements;
15562  }
15563 
15564  template <typename X, typename Y, typename... Z, typename Dispatch>
15565  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
15566  Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15567  {
15568  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15569 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15570  VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR &&
15571  "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15572 # endif
15573 
15574  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
15575  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
15576  d.vkGetImageMemoryRequirements2KHR(
15577  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
15578 
15579  return structureChain;
15580  }
15581 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15582 
15583  template <typename Dispatch>
15584  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
15585  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
15586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15587  {
15588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15589  d.vkGetBufferMemoryRequirements2KHR(
15590  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
15591  }
15592 
15593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15594  template <typename Dispatch>
15595  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
15596  Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15597  {
15598  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15599 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15600  VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR &&
15601  "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15602 # endif
15603 
15604  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
15605  d.vkGetBufferMemoryRequirements2KHR(
15606  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
15607 
15608  return memoryRequirements;
15609  }
15610 
15611  template <typename X, typename Y, typename... Z, typename Dispatch>
15612  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
15613  Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15614  {
15615  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15617  VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR &&
15618  "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15619 # endif
15620 
15621  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
15622  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
15623  d.vkGetBufferMemoryRequirements2KHR(
15624  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
15625 
15626  return structureChain;
15627  }
15628 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15629 
15630  template <typename Dispatch>
15631  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
15632  uint32_t * pSparseMemoryRequirementCount,
15633  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
15634  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15635  {
15636  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15637  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
15638  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
15639  pSparseMemoryRequirementCount,
15640  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
15641  }
15642 
15643 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15644  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
15645  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
15646  Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
15647  {
15648  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15650  VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR &&
15651  "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15652 # endif
15653 
15654  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
15655  uint32_t sparseMemoryRequirementCount;
15656  d.vkGetImageSparseMemoryRequirements2KHR(
15657  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
15658  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
15659  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
15660  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
15661  &sparseMemoryRequirementCount,
15662  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
15663 
15664  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
15665  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
15666  {
15667  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
15668  }
15669  return sparseMemoryRequirements;
15670  }
15671 
15672  template <typename SparseImageMemoryRequirements2Allocator,
15673  typename Dispatch,
15674  typename std::enable_if<
15675  std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
15676  int>::type>
15677  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
15678  Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
15679  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
15680  Dispatch const & d ) const
15681  {
15682  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15683 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15684  VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR &&
15685  "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
15686 # endif
15687 
15688  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
15689  sparseImageMemoryRequirements2Allocator );
15690  uint32_t sparseMemoryRequirementCount;
15691  d.vkGetImageSparseMemoryRequirements2KHR(
15692  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
15693  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
15694  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
15695  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
15696  &sparseMemoryRequirementCount,
15697  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
15698 
15699  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
15700  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
15701  {
15702  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
15703  }
15704  return sparseMemoryRequirements;
15705  }
15706 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15707 
15708  //=== VK_KHR_acceleration_structure ===
15709 
 // C-style overload: forwards the raw pointers straight to vkCreateAccelerationStructureKHR and
 // writes the new handle through pAccelerationStructure; returns the driver's VkResult as Result.
 // NOTE(review): the doxygen listing dropped line 15711 (presumably the
 // VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result return-type line) — confirm against the real header.
15710  template <typename Dispatch>
15712  Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
15713  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
15714  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
15715  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15716  {
 // Guard against a dispatcher built for a different Vulkan header version.
15717  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15718  return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
15719  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
15720  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
15721  reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
15722  }
15723 
15724 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: takes references, checks the Result via resultCheck (throws unless
 // exceptions are disabled) and returns the created handle wrapped by createResultValueType.
 // NOTE(review): doxygen dropped lines 15726/15728 — by the file's pattern these are the
 // ResultValueType return-type line and the Optional<const AllocationCallbacks> allocator
 // parameter (which the body references as 'allocator'); confirm against the real header.
15725  template <typename Dispatch>
15727  Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
15729  Dispatch const & d ) const
15730  {
15731  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15732 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
 // With the dynamic loader, verify the extension entry point was actually loaded.
15733  VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
15734 # endif
15735 
15736  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
15737  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR(
15738  m_device,
15739  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
15740  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15741  reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
15742  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
15743 
15744  return createResultValueType( result, accelerationStructure );
15745  }
15746 
15747 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Unique-handle variant: same call as createAccelerationStructureKHR, but the returned
 // handle is wrapped so it is destroyed automatically (RAII-style smart handle).
 // NOTE(review): doxygen dropped lines 15749/15751 (return-type / allocator-parameter lines)
 // and 15769 (the UniqueHandle construction inside the return) — confirm against the real header.
15748  template <typename Dispatch>
15750  Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
15752  Dispatch const & d ) const
15753  {
15754  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15755 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15756  VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
15757 # endif
15758 
15759  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
15760  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR(
15761  m_device,
15762  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
15763  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15764  reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
15765  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
15766 
15767  return createResultValueType(
15768  result,
15770  }
15771 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
15772 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15773 
 // C-style overload: destroys the acceleration structure via vkDestroyAccelerationStructureKHR,
 // passing the optional allocation callbacks through unchanged.
15774  template <typename Dispatch>
15775  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
15776  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
15777  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15778  {
15779  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15780  d.vkDestroyAccelerationStructureKHR(
15781  m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
15782  }
15783 
15784 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload of destroyAccelerationStructureKHR.
 // NOTE(review): doxygen dropped line 15787 — by the file's pattern this is the
 // Optional<const AllocationCallbacks> allocator parameter used in the body; confirm.
15785  template <typename Dispatch>
15786  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
15788  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15789  {
15790  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15791 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15792  VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
15793 # endif
15794 
15795  d.vkDestroyAccelerationStructureKHR(
15796  m_device,
15797  static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
15798  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
15799  }
15800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15801 
 // Generic destroy() overload for AccelerationStructureKHR — identical body to
 // destroyAccelerationStructureKHR above; exists so Device::destroy works uniformly per handle type.
15802  template <typename Dispatch>
15803  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
15804  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
15805  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15806  {
15807  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15808  d.vkDestroyAccelerationStructureKHR(
15809  m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
15810  }
15811 
15812 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced generic destroy() overload for AccelerationStructureKHR.
 // NOTE(review): doxygen dropped line 15815 — presumably the Optional<const AllocationCallbacks>
 // allocator parameter referenced in the body; confirm against the real header.
15813  template <typename Dispatch>
15814  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
15816  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15817  {
15818  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15819 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15820  VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
15821 # endif
15822 
15823  d.vkDestroyAccelerationStructureKHR(
15824  m_device,
15825  static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
15826  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
15827  }
15828 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15829 
 // C-style overload: records vkCmdBuildAccelerationStructuresKHR into this command buffer.
 // Caller guarantees pInfos and ppBuildRangeInfos each hold infoCount entries.
15830  template <typename Dispatch>
15831  VULKAN_HPP_INLINE void
15832  CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount,
15833  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
15834  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
15835  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15836  {
15837  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15838  d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
15839  infoCount,
15840  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
15841  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
15842  }
15843 
15844 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: takes ArrayProxy ranges; validates that both ranges are the same
 // length (assert when exceptions are off, LogicError otherwise) before recording the build.
15845  template <typename Dispatch>
15846  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
15847  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
15848  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
15849  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
15850  {
15851  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15852 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15853  VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
15854 # endif
15855 # ifdef VULKAN_HPP_NO_EXCEPTIONS
15856  VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
15857 # else
15858  if ( infos.size() != pBuildRangeInfos.size() )
15859  {
15860  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
15861  }
15862 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
15863 
15864  d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
15865  infos.size(),
15866  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
15867  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
15868  }
15869 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15870 
 // C-style overload: records an indirect acceleration-structure build; build parameters are
 // read by the device from the given device addresses at execution time.
15871  template <typename Dispatch>
15872  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
15873  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
15874  const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
15875  const uint32_t * pIndirectStrides,
15876  const uint32_t * const * ppMaxPrimitiveCounts,
15877  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15878  {
15879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15880  d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
15881  infoCount,
15882  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
15883  reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
15884  pIndirectStrides,
15885  ppMaxPrimitiveCounts );
15886  }
15887 
15888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: all four ArrayProxy ranges must match infos.size(); each mismatch is
 // asserted (no-exceptions build) or reported via LogicError before the command is recorded.
15889  template <typename Dispatch>
15890  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
15891  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
15892  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
15893  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
15894  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
15895  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
15896  {
15897  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15898 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15899  VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR &&
15900  "Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" );
15901 # endif
15902 # ifdef VULKAN_HPP_NO_EXCEPTIONS
15903  VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
15904  VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
15905  VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
15906 # else
15907  if ( infos.size() != indirectDeviceAddresses.size() )
15908  {
15909  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
15910  }
15911  if ( infos.size() != indirectStrides.size() )
15912  {
15913  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
15914  }
15915  if ( infos.size() != pMaxPrimitiveCounts.size() )
15916  {
15917  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
15918  }
15919 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
15920 
15921  d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
15922  infos.size(),
15923  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
15924  reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
15925  indirectStrides.data(),
15926  pMaxPrimitiveCounts.data() );
15927  }
15928 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15929 
 // C-style host build: vkBuildAccelerationStructuresKHR on the device object (CPU-side build),
 // optionally deferred via deferredOperation; returns the raw Result.
 // NOTE(review): doxygen dropped line 15931 (presumably the NODISCARD/INLINE Result
 // return-type line) — confirm against the real header.
15930  template <typename Dispatch>
15932  Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
15933  uint32_t infoCount,
15934  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
15935  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
15936  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15937  {
15938  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15939  return static_cast<Result>(
15940  d.vkBuildAccelerationStructuresKHR( m_device,
15941  static_cast<VkDeferredOperationKHR>( deferredOperation ),
15942  infoCount,
15943  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
15944  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
15945  }
15946 
15947 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced host build: validates range sizes, then treats eSuccess, eOperationDeferredKHR and
 // eOperationNotDeferredKHR all as acceptable outcomes and returns the Result to the caller.
 // NOTE(review): doxygen dropped lines 15949 (return type + method-name start) and 15968
 // (the 'VULKAN_HPP_NAMESPACE::Result result = static_cast<...>(' line that the call on
 // 15969 belongs to) — confirm against the real header.
15948  template <typename Dispatch>
15950  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
15951  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
15952  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
15953  Dispatch const & d ) const
15954  {
15955  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15956 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
15957  VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
15958 # endif
15959 # ifdef VULKAN_HPP_NO_EXCEPTIONS
15960  VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
15961 # else
15962  if ( infos.size() != pBuildRangeInfos.size() )
15963  {
15964  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
15965  }
15966 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
15967 
15969  d.vkBuildAccelerationStructuresKHR( m_device,
15970  static_cast<VkDeferredOperationKHR>( deferredOperation ),
15971  infos.size(),
15972  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
15973  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
 // Deferred-operation results are not errors: success, deferred and not-deferred all pass.
15974  resultCheck(
15975  result,
15976  VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
15977  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
15978 
15979  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
15980  }
15981 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15982 
 // C-style host copy of an acceleration structure, optionally deferred; returns the raw Result.
15983  template <typename Dispatch>
15984  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
15985  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
15986  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15987  {
15988  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15989  return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
15990  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
15991  }
15992 
15993 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced host copy: deferred/not-deferred results are accepted alongside success.
 // NOTE(review): doxygen dropped line 15995 (presumably the Result-typed return-type line).
15994  template <typename Dispatch>
15996  Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
15997  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
15998  Dispatch const & d ) const
15999  {
16000  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16001 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16002  VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
16003 # endif
16004 
16005  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureKHR(
16006  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
16007  resultCheck(
16008  result,
16009  VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
16010  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
16011 
16012  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
16013  }
16014 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16015 
 // C-style host serialization of an acceleration structure to memory, optionally deferred.
 // NOTE(review): doxygen dropped line 16017 (presumably the NODISCARD/INLINE Result line).
16016  template <typename Dispatch>
16018  Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16019  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
16020  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16021  {
16022  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16023  return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
16024  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
16025  }
16026 
16027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced serialize-to-memory: deferred/not-deferred results accepted alongside success.
 // NOTE(review): doxygen dropped line 16029 (presumably the Result-typed return-type line).
16028  template <typename Dispatch>
16030  Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16031  const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
16032  Dispatch const & d ) const
16033  {
16034  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16035 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16036  VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR &&
16037  "Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
16038 # endif
16039 
16040  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureToMemoryKHR(
16041  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
16042  resultCheck(
16043  result,
16044  VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
16045  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
16046 
16047  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
16048  }
16049 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16050 
 // C-style host deserialization from memory into an acceleration structure, optionally deferred.
 // NOTE(review): doxygen dropped line 16052 (presumably the NODISCARD/INLINE Result line).
16051  template <typename Dispatch>
16053  Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16054  const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
16055  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16056  {
16057  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16058  return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
16059  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
16060  }
16061 
16062 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced deserialize-from-memory: deferred/not-deferred results accepted alongside success.
 // NOTE(review): doxygen dropped line 16064 (presumably the Result-typed return-type line).
16063  template <typename Dispatch>
16065  Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16066  const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
16067  Dispatch const & d ) const
16068  {
16069  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16070 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16071  VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR &&
16072  "Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
16073 # endif
16074 
16075  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToAccelerationStructureKHR(
16076  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
16077  resultCheck(
16078  result,
16079  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
16080  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
16081 
16082  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
16083  }
16084 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16085 
 // C-style host query of acceleration-structure properties into a caller-provided buffer.
 // NOTE(review): doxygen dropped lines 16087 and 16090 — by the file's pattern these are the
 // NODISCARD/INLINE Result return-type line and the VULKAN_HPP_NAMESPACE::QueryType queryType
 // parameter (the body casts 'queryType' on 16100); confirm against the real header.
16086  template <typename Dispatch>
16088  Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
16089  const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
16091  size_t dataSize,
16092  void * pData,
16093  size_t stride,
16094  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16095  {
16096  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16097  return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
16098  accelerationStructureCount,
16099  reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
16100  static_cast<VkQueryType>( queryType ),
16101  dataSize,
16102  pData,
16103  stride ) );
16104  }
16105 
16106 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced vector variant: dataSize must be a multiple of sizeof(DataType); results are
 // returned in a std::vector<DataType> sized dataSize / sizeof(DataType).
 // NOTE(review): doxygen dropped lines 16108/16111 (return-type line and the QueryType
 // queryType parameter) and 16124 (the 'Result result = static_cast<...>(' line that the
 // call on 16125 belongs to); confirm against the real header.
16107  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
16109  Device::writeAccelerationStructuresPropertiesKHR(
16110  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
16112  size_t dataSize,
16113  size_t stride,
16114  Dispatch const & d ) const
16115  {
16116  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16118  VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR &&
16119  "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
16120 # endif
16121 
 // Caller's dataSize must cover a whole number of DataType elements.
16122  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
16123  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
16125  d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
16126  accelerationStructures.size(),
16127  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
16128  static_cast<VkQueryType>( queryType ),
16129  data.size() * sizeof( DataType ),
16130  reinterpret_cast<void *>( data.data() ),
16131  stride ) );
16132  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
16133 
16134  return createResultValueType( result, data );
16135  }
16136 
 // Single-value variant: queries exactly one DataType worth of property data.
 // NOTE(review): doxygen dropped lines 16140 (presumably the QueryType queryType parameter —
 // the body casts 'queryType' on 16155) and 16151 (the 'Result result = static_cast<...>('
 // line the call on 16152 belongs to); confirm against the real header.
16137  template <typename DataType, typename Dispatch>
16138  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR(
16139  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
16141  size_t stride,
16142  Dispatch const & d ) const
16143  {
16144  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16145 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16146  VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR &&
16147  "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
16148 # endif
16149 
16150  DataType data;
16152  d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
16153  accelerationStructures.size(),
16154  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
16155  static_cast<VkQueryType>( queryType ),
16156  sizeof( DataType ),
16157  reinterpret_cast<void *>( &data ),
16158  stride ) );
16159  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
16160 
16161  return createResultValueType( result, data );
16162  }
16163 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16164 
 // C-style overload: records vkCmdCopyAccelerationStructureKHR (GPU-side copy).
16165  template <typename Dispatch>
16166  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
16167  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16168  {
16169  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16170  d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
16171  }
16172 
16173 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced reference overload of the command-buffer copy above.
16174  template <typename Dispatch>
16175  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
16176  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16177  {
16178  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16179 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16180  VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
16181 # endif
16182 
16183  d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
16184  }
16185 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16186 
 // C-style overload: records vkCmdCopyAccelerationStructureToMemoryKHR (GPU-side serialize).
16187  template <typename Dispatch>
16188  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
16189  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16190  {
16191  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16192  d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
16193  }
16194 
16195 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced reference overload of the command-buffer serialize above.
16196  template <typename Dispatch>
16197  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
16198  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16199  {
16200  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16201 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16202  VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR &&
16203  "Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
16204 # endif
16205 
16206  d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
16207  }
16208 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16209 
 // C-style overload: records vkCmdCopyMemoryToAccelerationStructureKHR (GPU-side deserialize).
16210  template <typename Dispatch>
16211  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
16212  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16213  {
16214  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16215  d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
16216  }
16217 
16218 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced reference overload of the command-buffer deserialize above.
16219  template <typename Dispatch>
16220  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
16221  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16222  {
16223  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16224 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16225  VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR &&
16226  "Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
16227 # endif
16228 
16229  d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
16230  }
16231 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16232 
 // C-style overload: returns the device address of an acceleration structure
 // via vkGetAccelerationStructureDeviceAddressKHR.
16233  template <typename Dispatch>
16234  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
16235  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16236  {
16237  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16238  return static_cast<DeviceAddress>(
16239  d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
16240  }
16241 
16242 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced reference overload; no Result to check — the address is returned directly.
 // NOTE(review): doxygen dropped line 16244 (presumably the DeviceAddress return-type line).
16243  template <typename Dispatch>
16245  Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
16246  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16247  {
16248  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16249 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16250  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR &&
16251  "Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" );
16252 # endif
16253 
16254  VkDeviceAddress result =
16255  d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
16256 
16257  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
16258  }
16259 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16260 
 // C-style overload: records a GPU-side property query into queryPool starting at firstQuery.
 // NOTE(review): doxygen dropped line 16265 — presumably the QueryType queryType parameter
 // (the body casts 'queryType' on 16274); confirm against the real header.
16261  template <typename Dispatch>
16262  VULKAN_HPP_INLINE void
16263  CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
16264  const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
16266  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
16267  uint32_t firstQuery,
16268  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16269  {
16270  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16271  d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
16272  accelerationStructureCount,
16273  reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
16274  static_cast<VkQueryType>( queryType ),
16275  static_cast<VkQueryPool>( queryPool ),
16276  firstQuery );
16277  }
16278 
16279 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced ArrayProxy overload of the GPU-side property query above.
 // NOTE(review): doxygen dropped line 16283 — presumably the QueryType queryType parameter;
 // confirm against the real header.
16280  template <typename Dispatch>
16281  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
16282  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
16284  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
16285  uint32_t firstQuery,
16286  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16287  {
16288  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16289 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16290  VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR &&
16291  "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
16292 # endif
16293 
16294  d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
16295  accelerationStructures.size(),
16296  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
16297  static_cast<VkQueryType>( queryType ),
16298  static_cast<VkQueryPool>( queryPool ),
16299  firstQuery );
16300  }
16301 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16302 
 // C-style overload: asks whether a serialized acceleration structure version is compatible
 // with this device, writing the answer through pCompatibility.
 // NOTE(review): doxygen dropped line 16305 — presumably the
 // AccelerationStructureCompatibilityKHR * pCompatibility out-parameter; confirm.
16303  template <typename Dispatch>
16304  VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
16306  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16307  {
16308  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16309  d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
16310  reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
16311  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
16312  }
16313 
16314 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16315  template <typename Dispatch>
16317  Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
16318  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16319  {
16320  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16321 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16322  VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR &&
16323  "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" );
16324 # endif
16325 
16327  d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
16328  reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
16329  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
16330 
16331  return compatibility;
16332  }
16333 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16334 
16335  template <typename Dispatch>
16336  VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
16337  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
16338  const uint32_t * pMaxPrimitiveCounts,
16339  VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
16340  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16341  {
16342  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16343  d.vkGetAccelerationStructureBuildSizesKHR( m_device,
16344  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
16345  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
16346  pMaxPrimitiveCounts,
16347  reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
16348  }
16349 
16350 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16351  template <typename Dispatch>
16352  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
16353  Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
16354  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
16355  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
16356  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
16357  {
16358  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16359 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16360  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR &&
16361  "Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" );
16362 # endif
16363 # ifdef VULKAN_HPP_NO_EXCEPTIONS
16364  VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
16365 # else
16366  if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
16367  {
16368  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
16369  }
16370 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
16371 
16372  VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
16373  d.vkGetAccelerationStructureBuildSizesKHR( m_device,
16374  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
16375  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
16376  maxPrimitiveCounts.data(),
16377  reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
16378 
16379  return sizeInfo;
16380  }
16381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16382 
16383  //=== VK_KHR_ray_tracing_pipeline ===
16384 
16385  template <typename Dispatch>
16386  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
16387  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
16388  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
16389  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
16390  uint32_t width,
16391  uint32_t height,
16392  uint32_t depth,
16393  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16394  {
16395  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16396  d.vkCmdTraceRaysKHR( m_commandBuffer,
16397  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
16398  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
16399  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
16400  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
16401  width,
16402  height,
16403  depth );
16404  }
16405 
16406 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16407  template <typename Dispatch>
16408  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
16409  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
16410  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
16411  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
16412  uint32_t width,
16413  uint32_t height,
16414  uint32_t depth,
16415  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16416  {
16417  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16418 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16419  VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16420 # endif
16421 
16422  d.vkCmdTraceRaysKHR( m_commandBuffer,
16423  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
16424  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
16425  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
16426  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
16427  width,
16428  height,
16429  depth );
16430  }
16431 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16432 
16433  template <typename Dispatch>
16435  Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16436  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16437  uint32_t createInfoCount,
16438  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
16439  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16440  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
16441  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16442  {
16443  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16444  return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
16445  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16446  static_cast<VkPipelineCache>( pipelineCache ),
16447  createInfoCount,
16448  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
16449  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16450  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
16451  }
16452 
16453 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16454  template <typename PipelineAllocator, typename Dispatch>
16456  Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16457  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16458  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
16460  Dispatch const & d ) const
16461  {
16462  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16463 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16464  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16465 # endif
16466 
16467  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
16468  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16469  m_device,
16470  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16471  static_cast<VkPipelineCache>( pipelineCache ),
16472  createInfos.size(),
16473  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
16474  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16475  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
16476  resultCheck( result,
16477  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
16478  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16479  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16480  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16481  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16482 
16484  }
16485 
16486  template <typename PipelineAllocator,
16487  typename Dispatch,
16488  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
16490  Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16491  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16492  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
16494  PipelineAllocator & pipelineAllocator,
16495  Dispatch const & d ) const
16496  {
16497  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16498 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16499  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16500 # endif
16501 
16502  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
16503  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16504  m_device,
16505  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16506  static_cast<VkPipelineCache>( pipelineCache ),
16507  createInfos.size(),
16508  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
16509  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16510  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
16511  resultCheck( result,
16512  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
16513  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16514  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16515  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16516  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16517 
16519  }
16520 
16521  template <typename Dispatch>
16523  Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16524  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16525  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
16527  Dispatch const & d ) const
16528  {
16529  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16530 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16531  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16532 # endif
16533 
16534  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
16535  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16536  m_device,
16537  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16538  static_cast<VkPipelineCache>( pipelineCache ),
16539  1,
16540  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
16541  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16542  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
16543  resultCheck( result,
16544  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
16545  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16546  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16547  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16548  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16549 
16550  return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
16551  }
16552 
16553 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16554  template <typename Dispatch, typename PipelineAllocator>
16556  Device::createRayTracingPipelinesKHRUnique(
16557  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16558  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16559  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
16561  Dispatch const & d ) const
16562  {
16563  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16564 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16565  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16566 # endif
16567 
16568  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
16569  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16570  m_device,
16571  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16572  static_cast<VkPipelineCache>( pipelineCache ),
16573  createInfos.size(),
16574  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
16575  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16576  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
16577  resultCheck( result,
16578  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
16579  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16580  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16581  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16582  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16583  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
16584  uniquePipelines.reserve( createInfos.size() );
16585  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
16586  for ( auto const & pipeline : pipelines )
16587  {
16588  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
16589  }
16591  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
16592  }
16593 
16594  template <
16595  typename Dispatch,
16596  typename PipelineAllocator,
16597  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
16599  Device::createRayTracingPipelinesKHRUnique(
16600  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16601  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16602  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
16604  PipelineAllocator & pipelineAllocator,
16605  Dispatch const & d ) const
16606  {
16607  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16608 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16609  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16610 # endif
16611 
16612  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
16613  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16614  m_device,
16615  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16616  static_cast<VkPipelineCache>( pipelineCache ),
16617  createInfos.size(),
16618  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
16619  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16620  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
16621  resultCheck( result,
16622  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
16623  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16624  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16625  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16626  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16627  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
16628  uniquePipelines.reserve( createInfos.size() );
16629  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
16630  for ( auto const & pipeline : pipelines )
16631  {
16632  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
16633  }
16635  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
16636  }
16637 
16638  template <typename Dispatch>
16640  Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
16641  VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
16642  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
16644  Dispatch const & d ) const
16645  {
16646  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16647 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16648  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16649 # endif
16650 
16651  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
16652  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
16653  m_device,
16654  static_cast<VkDeferredOperationKHR>( deferredOperation ),
16655  static_cast<VkPipelineCache>( pipelineCache ),
16656  1,
16657  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
16658  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16659  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
16660  resultCheck( result,
16661  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
16662  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
16663  VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
16664  VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
16665  VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
16666 
16668  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16670  }
16671 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16673 
16674  template <typename Dispatch>
16675  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
16676  uint32_t firstGroup,
16677  uint32_t groupCount,
16678  size_t dataSize,
16679  void * pData,
16680  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16681  {
16682  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16683  return static_cast<Result>(
16684  d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
16685  }
16686 
16687 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16688  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
16690  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
16691  {
16692  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16693 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16694  VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR &&
16695  "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
16696 # endif
16697 
16698  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
16699  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
16700  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
16701  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
16702  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
16703 
16704  return createResultValueType( result, data );
16705  }
16706 
16707  template <typename DataType, typename Dispatch>
16709  Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
16710  {
16711  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16712 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16713  VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR &&
16714  "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
16715 # endif
16716 
16717  DataType data;
16718  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
16719  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
16720  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
16721 
16722  return createResultValueType( result, data );
16723  }
16724 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16725 
16726  template <typename Dispatch>
16727  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
16728  uint32_t firstGroup,
16729  uint32_t groupCount,
16730  size_t dataSize,
16731  void * pData,
16732  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16733  {
16734  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16735  return static_cast<Result>(
16736  d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
16737  }
16738 
16739 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16740  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
16742  Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
16743  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
16744  {
16745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16746 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16747  VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
16748  "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16749 # endif
16750 
16751  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
16752  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
16753  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
16754  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
16755  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
16756 
16757  return createResultValueType( result, data );
16758  }
16759 
16760  template <typename DataType, typename Dispatch>
16761  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
16762  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
16763  {
16764  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16765 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16766  VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
16767  "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16768 # endif
16769 
16770  DataType data;
16771  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
16772  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
16773  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
16774 
16775  return createResultValueType( result, data );
16776  }
16777 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16778 
16779  template <typename Dispatch>
16780  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
16781  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
16782  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
16783  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
16784  VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
16785  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16786  {
16787  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16788  d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
16789  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
16790  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
16791  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
16792  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
16793  static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
16794  }
16795 
16796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16797  template <typename Dispatch>
16798  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
16799  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
16800  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
16801  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
16802  VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
16803  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16804  {
16805  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16806 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16807  VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" );
16808 # endif
16809 
16810  d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
16811  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
16812  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
16813  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
16814  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
16815  static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
16816  }
16817 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16818 
16819  template <typename Dispatch>
16820  VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
16821  uint32_t group,
16823  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16824  {
16825  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16826  return static_cast<DeviceSize>(
16827  d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
16828  }
16829 
  template <typename Dispatch>
  // Sets the stack size (in bytes) used by subsequent ray-tracing dispatches recorded into
  // this command buffer; forwards directly to vkCmdSetRayTracingPipelineStackSizeKHR.
  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }
16836 
16837  //=== VK_KHR_sampler_ycbcr_conversion ===
16838 
16839  template <typename Dispatch>
16841  Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
16842  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16843  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
16844  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16845  {
16846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16847  return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
16848  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
16849  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16850  reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
16851  }
16852 
16853 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16854  template <typename Dispatch>
16856  Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
16858  Dispatch const & d ) const
16859  {
16860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16861 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16862  VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR &&
16863  "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
16864 # endif
16865 
16866  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
16867  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR(
16868  m_device,
16869  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
16870  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16871  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
16872  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
16873 
16874  return createResultValueType( result, ycbcrConversion );
16875  }
16876 
16877 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Enhanced-mode overload: creates a sampler Y'CbCr conversion, checks the result
// via resultCheck, and returns the handle wrapped for unique ownership.
// NOTE(review): the documentation extraction dropped source lines 16879/16881/16899
// (return type, allocator parameter, UniqueHandle construction) from this definition;
// consult the generated vulkan_funcs.hpp for the full text.
16878  template <typename Dispatch>
16880  Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
16882  Dispatch const & d ) const
16883  {
16884  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16885 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16886  VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR &&
16887  "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
16888 # endif
16889 
16890  VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
16891  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR(
16892  m_device,
16893  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
16894  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16895  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
16896  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
16897 
16898  return createResultValueType(
16900  }
16901 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16902 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16903 
16904  template <typename Dispatch>
16905  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
16906  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16907  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16908  {
16909  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16910  d.vkDestroySamplerYcbcrConversionKHR(
16911  m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
16912  }
16913 
16914 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: destroys the conversion, converting the optional
// C++ allocator wrapper back to the C allocation-callbacks pointer.
// NOTE(review): the extraction dropped source line 16917 (allocator parameter)
// from this signature; see the generated vulkan_funcs.hpp.
16915  template <typename Dispatch>
16916  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
16918  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16919  {
16920  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16921 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16922  VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR &&
16923  "Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
16924 # endif
16925 
16926  d.vkDestroySamplerYcbcrConversionKHR(
16927  m_device,
16928  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
16929  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
16930  }
16931 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16932 
16933  //=== VK_KHR_bind_memory2 ===
16934 
16935  template <typename Dispatch>
16936  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
16937  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
16938  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16939  {
16940  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16941  return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
16942  }
16943 
16944 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes an ArrayProxy of bind infos and checks the result.
// NOTE(review): the extraction dropped source lines 16946/16955 (return type and
// the start of the result declaration); see the generated vulkan_funcs.hpp.
16945  template <typename Dispatch>
16947  Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
16948  Dispatch const & d ) const
16949  {
16950  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16951 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16952  VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
16953 # endif
16954 
16956  d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
16957  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
16958 
16959  return createResultValueType( result );
16960  }
16961 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16962 
16963  template <typename Dispatch>
16964  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount,
16965  const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
16966  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16967  {
16968  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16969  return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
16970  }
16971 
16972 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes an ArrayProxy of bind infos and checks the result.
// NOTE(review): the extraction dropped source lines 16974/16982 (return type and
// the start of the result declaration); see the generated vulkan_funcs.hpp.
16973  template <typename Dispatch>
16975  Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
16976  {
16977  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16978 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
16979  VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
16980 # endif
16981 
16983  d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
16984  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
16985 
16986  return createResultValueType( result );
16987  }
16988 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16989 
16990  //=== VK_EXT_image_drm_format_modifier ===
16991 
16992  template <typename Dispatch>
16993  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
16994  VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16995  {
16996  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16997  return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
16998  m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
16999  }
17000 
17001 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the properties by value after result checking.
// NOTE(review): the extraction dropped source line 17003 (return type) from this
// definition; see the generated vulkan_funcs.hpp.
17002  template <typename Dispatch>
17004  Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
17005  {
17006  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17007 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17008  VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT &&
17009  "Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" );
17010 # endif
17011 
17012  VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
17013  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
17014  m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
17015  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
17016 
17017  return createResultValueType( result, properties );
17018  }
17019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17020 
17021  //=== VK_EXT_validation_cache ===
17022 
17023  template <typename Dispatch>
17024  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
17025  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17026  VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
17027  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17028  {
17029  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17030  return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
17031  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
17032  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17033  reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
17034  }
17035 
17036 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates a validation cache, checks the result, and
// returns the handle by value.
// NOTE(review): the extraction dropped source lines 17038/17040 (return type and
// allocator parameter); see the generated vulkan_funcs.hpp.
17037  template <typename Dispatch>
17039  Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
17041  Dispatch const & d ) const
17042  {
17043  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17044 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17045  VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" );
17046 # endif
17047 
17048  VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
17049  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT(
17050  m_device,
17051  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
17052  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17053  reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
17054  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
17055 
17056  return createResultValueType( result, validationCache );
17057  }
17058 
17059 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Enhanced-mode overload: as createValidationCacheEXT, but wraps the handle for
// unique ownership.
// NOTE(review): the extraction dropped source lines 17061/17063/17080 (return
// type, allocator parameter, UniqueHandle construction); see vulkan_funcs.hpp.
17060  template <typename Dispatch>
17062  Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
17064  Dispatch const & d ) const
17065  {
17066  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17067 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17068  VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" );
17069 # endif
17070 
17071  VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
17072  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT(
17073  m_device,
17074  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
17075  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17076  reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
17077  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
17078 
17079  return createResultValueType(
17081  }
17082 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17084 
17085  template <typename Dispatch>
17086  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
17087  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17088  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17089  {
17090  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17091  d.vkDestroyValidationCacheEXT(
17092  m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17093  }
17094 
17095 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: destroys the cache, converting the optional C++
// allocator wrapper back to the C allocation-callbacks pointer.
// NOTE(review): the extraction dropped source line 17098 (allocator parameter);
// see the generated vulkan_funcs.hpp.
17096  template <typename Dispatch>
17097  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
17099  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17100  {
17101  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17102 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17103  VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" );
17104 # endif
17105 
17106  d.vkDestroyValidationCacheEXT(
17107  m_device,
17108  static_cast<VkValidationCacheEXT>( validationCache ),
17109  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17110  }
17111 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17112 
17113  template <typename Dispatch>
17114  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
17115  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17116  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17117  {
17118  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17119  d.vkDestroyValidationCacheEXT(
17120  m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17121  }
17122 
17123 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode generic destroy() overload for ValidationCacheEXT.
// NOTE(review): the extraction dropped source line 17126 (allocator parameter);
// see the generated vulkan_funcs.hpp.
17124  template <typename Dispatch>
17125  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
17127  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17128  {
17129  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17130 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17131  VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" );
17132 # endif
17133 
17134  d.vkDestroyValidationCacheEXT(
17135  m_device,
17136  static_cast<VkValidationCacheEXT>( validationCache ),
17137  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17138  }
17139 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17140 
17141  template <typename Dispatch>
17142  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
17143  uint32_t srcCacheCount,
17144  const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
17145  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17146  {
17147  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17148  return static_cast<Result>( d.vkMergeValidationCachesEXT(
17149  m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
17150  }
17151 
17152 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes the source caches as an ArrayProxy and checks
// the result.
// NOTE(review): the extraction dropped source line 17154 (return type); see the
// generated vulkan_funcs.hpp.
17153  template <typename Dispatch>
17155  Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
17156  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
17157  Dispatch const & d ) const
17158  {
17159  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17160 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17161  VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" );
17162 # endif
17163 
17164  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergeValidationCachesEXT(
17165  m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
17166  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
17167 
17168  return createResultValueType( result );
17169  }
17170 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17171 
17172  template <typename Dispatch>
17173  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
17174  size_t * pDataSize,
17175  void * pData,
17176  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17177  {
17178  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17179  return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
17180  }
17181 
17182 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: fetches the whole cache blob into a std::vector,
// looping on VK_INCOMPLETE in case the cache grows between the size query and
// the data read, then trimming the vector to the final size.
// NOTE(review): the extraction dropped source lines 17184/17194 (return type and
// the result declaration); see the generated vulkan_funcs.hpp.
17183  template <typename Uint8_tAllocator, typename Dispatch>
17185  Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
17186  {
17187  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17189  VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" );
17190 # endif
17191 
17192  std::vector<uint8_t, Uint8_tAllocator> data;
17193  size_t dataSize;
17195  do
17196  {
17197  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
17198  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
17199  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
17200  {
17201  data.resize( dataSize );
17202  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
17203  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
17204  }
17205  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
17206  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
17207  VULKAN_HPP_ASSERT( dataSize <= data.size() );
17208  if ( dataSize < data.size() )
17209  {
17210  data.resize( dataSize );
17211  }
17212  return createResultValueType( result, data );
17213  }
17214 
// Enhanced-mode overload with a caller-supplied vector allocator; otherwise
// identical to the default-allocator overload above (two-call / VK_INCOMPLETE
// retry loop, then trim to the final size).
// NOTE(review): the extraction dropped source lines 17218/17228 (return type and
// the result declaration); see the generated vulkan_funcs.hpp.
17215  template <typename Uint8_tAllocator,
17216  typename Dispatch,
17217  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
17219  Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
17220  {
17221  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17222 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17223  VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" );
17224 # endif
17225 
17226  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
17227  size_t dataSize;
17229  do
17230  {
17231  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
17232  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
17233  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
17234  {
17235  data.resize( dataSize );
17236  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
17237  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
17238  }
17239  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
17240  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
17241  VULKAN_HPP_ASSERT( dataSize <= data.size() );
17242  if ( dataSize < data.size() )
17243  {
17244  data.resize( dataSize );
17245  }
17246  return createResultValueType( result, data );
17247  }
17248 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17249 
17250  //=== VK_NV_shading_rate_image ===
17251 
// Binds an image view as the shading-rate image for subsequent draws.
// NOTE(review): the extraction dropped source line 17254 (the imageLayout
// parameter) from this signature; see the generated vulkan_funcs.hpp.
17252  template <typename Dispatch>
17253  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
17255  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17256  {
17257  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17258  d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
17259  }
17260 
17261  template <typename Dispatch>
17262  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
17263  uint32_t viewportCount,
17264  const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
17265  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17266  {
17267  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17268  d.vkCmdSetViewportShadingRatePaletteNV(
17269  m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
17270  }
17271 
17272 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17273  template <typename Dispatch>
17274  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
17275  uint32_t firstViewport,
17276  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
17277  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17278  {
17279  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17280 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17281  VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" );
17282 # endif
17283 
17284  d.vkCmdSetViewportShadingRatePaletteNV(
17285  m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
17286  }
17287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17288 
17289  template <typename Dispatch>
17290  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
17291  uint32_t customSampleOrderCount,
17292  const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
17293  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17294  {
17295  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17296  d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
17297  static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
17298  customSampleOrderCount,
17299  reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
17300  }
17301 
17302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17303  template <typename Dispatch>
17304  VULKAN_HPP_INLINE void
17305  CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
17306  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
17307  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17308  {
17309  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17310 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17311  VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" );
17312 # endif
17313 
17314  d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
17315  static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
17316  customSampleOrders.size(),
17317  reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
17318  }
17319 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17320 
17321  //=== VK_NV_ray_tracing ===
17322 
// C-style overload: creates an NV acceleration structure through the
// pAccelerationStructure out-parameter.
// NOTE(review): the extraction dropped source line 17324 (return type); see the
// generated vulkan_funcs.hpp.
17323  template <typename Dispatch>
17325  Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
17326  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17327  VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
17328  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17329  {
17330  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17331  return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
17332  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
17333  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17334  reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
17335  }
17336 
17337 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates an NV acceleration structure, checks the
// result, and returns the handle by value.
// NOTE(review): the extraction dropped source lines 17339/17341 (return type and
// allocator parameter); see the generated vulkan_funcs.hpp.
17338  template <typename Dispatch>
17340  Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
17342  Dispatch const & d ) const
17343  {
17344  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17345 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17346  VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
17347 # endif
17348 
17349  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
17350  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV(
17351  m_device,
17352  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
17353  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17354  reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
17355  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
17356 
17357  return createResultValueType( result, accelerationStructure );
17358  }
17359 
17360 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Enhanced-mode overload: as createAccelerationStructureNV, but wraps the
// handle for unique ownership.
// NOTE(review): the extraction dropped source lines 17362/17364/17382 (return
// type, allocator parameter, UniqueHandle construction); see vulkan_funcs.hpp.
17361  template <typename Dispatch>
17363  Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
17365  Dispatch const & d ) const
17366  {
17367  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17368 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17369  VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
17370 # endif
17371 
17372  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
17373  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV(
17374  m_device,
17375  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
17376  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17377  reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
17378  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
17379 
17380  return createResultValueType(
17381  result,
17383  }
17384 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17386 
17387  template <typename Dispatch>
17388  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
17389  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17390  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17391  {
17392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17393  d.vkDestroyAccelerationStructureNV(
17394  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17395  }
17396 
17397 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: destroys the structure, converting the optional C++
// allocator wrapper back to the C allocation-callbacks pointer.
// NOTE(review): the extraction dropped source line 17400 (allocator parameter);
// see the generated vulkan_funcs.hpp.
17398  template <typename Dispatch>
17399  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
17401  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17402  {
17403  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17404 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17405  VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
17406 # endif
17407 
17408  d.vkDestroyAccelerationStructureNV(
17409  m_device,
17410  static_cast<VkAccelerationStructureNV>( accelerationStructure ),
17411  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17412  }
17413 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17414 
17415  template <typename Dispatch>
17416  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
17417  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17418  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17419  {
17420  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17421  d.vkDestroyAccelerationStructureNV(
17422  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17423  }
17424 
17425 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode generic destroy() overload for AccelerationStructureNV.
// NOTE(review): the extraction dropped source line 17428 (allocator parameter);
// see the generated vulkan_funcs.hpp.
17426  template <typename Dispatch>
17427  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
17429  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17430  {
17431  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17433  VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
17434 # endif
17435 
17436  d.vkDestroyAccelerationStructureNV(
17437  m_device,
17438  static_cast<VkAccelerationStructureNV>( accelerationStructure ),
17439  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17440  }
17441 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17442 
17443  template <typename Dispatch>
17444  VULKAN_HPP_INLINE void
17445  Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
17446  VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
17447  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17448  {
17449  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17450  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
17451  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
17452  reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
17453  }
17454 
17455 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the MemoryRequirements2KHR by value.
// NOTE(review): the extraction dropped source lines 17457/17467 (return type and
// the memoryRequirements declaration); see the generated vulkan_funcs.hpp.
17456  template <typename Dispatch>
17458  Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
17459  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17460  {
17461  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17462 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17463  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV &&
17464  "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
17465 # endif
17466 
17468  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
17469  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
17470  reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
17471 
17472  return memoryRequirements;
17473  }
17474 
17475  template <typename X, typename Y, typename... Z, typename Dispatch>
17476  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
17477  Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
17478  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17479  {
17480  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17481 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17482  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV &&
17483  "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
17484 # endif
17485 
17486  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
17487  VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
17488  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
17489  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
17490  reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
17491 
17492  return structureChain;
17493  }
17494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17495 
17496  template <typename Dispatch>
17497  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
17498  uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17499  {
17500  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17501  return static_cast<Result>(
17502  d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
17503  }
17504 
17505 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes an ArrayProxy of bind infos and checks the result.
// NOTE(review): the extraction dropped source line 17507 (return type and
// function name) from this signature; see the generated vulkan_funcs.hpp.
17506  template <typename Dispatch>
17508  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
17509  {
17510  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17511 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17512  VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" );
17513 # endif
17514 
17515  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindAccelerationStructureMemoryNV(
17516  m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
17517  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
17518 
17519  return createResultValueType( result );
17520  }
17521 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17522 
 // Records vkCmdBuildAccelerationStructureNV into this command buffer (VK_NV_ray_tracing).
 // Pointer overload: thin cast-and-forward wrapper, no validation beyond the version assert.
 // NOTE(review): listing line 17527 is missing from this Doxygen extraction; the body casts a
 // parameter named `update` (line 17539), so 17527 presumably declared
 // `VULKAN_HPP_NAMESPACE::Bool32 update,` — confirm against vulkan_funcs.hpp.
17523  template <typename Dispatch>
17524  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
17525  VULKAN_HPP_NAMESPACE::Buffer instanceData,
17526  VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
17528  VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
17529  VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
17530  VULKAN_HPP_NAMESPACE::Buffer scratch,
17531  VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
17532  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17533  {
17534  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17535  d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
17536  reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
17537  static_cast<VkBuffer>( instanceData ),
17538  static_cast<VkDeviceSize>( instanceOffset ),
17539  static_cast<VkBool32>( update ),
17540  static_cast<VkAccelerationStructureNV>( dst ),
17541  static_cast<VkAccelerationStructureNV>( src ),
17542  static_cast<VkBuffer>( scratch ),
17543  static_cast<VkDeviceSize>( scratchOffset ) );
17544  }
17545 
17546 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload: takes the build info by reference instead of pointer.
 // NOTE(review): listing line 17551 (the `update` parameter line, by the same reasoning as
 // above) is missing from this extraction.
17547  template <typename Dispatch>
17548  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
17549  VULKAN_HPP_NAMESPACE::Buffer instanceData,
17550  VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
17552  VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
17553  VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
17554  VULKAN_HPP_NAMESPACE::Buffer scratch,
17555  VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
17556  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17557  {
17558  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17559 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17560  VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
17561 # endif
17562 
17563  d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
17564  reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
17565  static_cast<VkBuffer>( instanceData ),
17566  static_cast<VkDeviceSize>( instanceOffset ),
17567  static_cast<VkBool32>( update ),
17568  static_cast<VkAccelerationStructureNV>( dst ),
17569  static_cast<VkAccelerationStructureNV>( src ),
17570  static_cast<VkBuffer>( scratch ),
17571  static_cast<VkDeviceSize>( scratchOffset ) );
17572  }
17573 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17574 
 // Records vkCmdCopyAccelerationStructureNV: copy/compact `src` into `dst` per `mode`.
 // NOTE(review): listing line 17578 is missing from this Doxygen extraction; the body casts a
 // parameter named `mode` to VkCopyAccelerationStructureModeKHR (line 17585), so 17578
 // presumably declared a CopyAccelerationStructureModeKHR parameter — confirm in the header.
17575  template <typename Dispatch>
17576  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
17577  VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
17579  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17580  {
17581  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17582  d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
17583  static_cast<VkAccelerationStructureNV>( dst ),
17584  static_cast<VkAccelerationStructureNV>( src ),
17585  static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
17586  }
17587 
 // Records vkCmdTraceRaysNV: launches a width x height x depth grid of rays using the four
 // shader-binding-table regions (raygen, miss, hit, callable), each given as buffer + offset
 // (+ stride for the indexed tables). Pure cast-and-forward wrapper; noexcept by design.
17588  template <typename Dispatch>
17589  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
17590  VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
17591  VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
17592  VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
17593  VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
17594  VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
17595  VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
17596  VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
17597  VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
17598  VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
17599  VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
17600  uint32_t width,
17601  uint32_t height,
17602  uint32_t depth,
17603  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17604  {
17605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17606  d.vkCmdTraceRaysNV( m_commandBuffer,
17607  static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
17608  static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
17609  static_cast<VkBuffer>( missShaderBindingTableBuffer ),
17610  static_cast<VkDeviceSize>( missShaderBindingOffset ),
17611  static_cast<VkDeviceSize>( missShaderBindingStride ),
17612  static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
17613  static_cast<VkDeviceSize>( hitShaderBindingOffset ),
17614  static_cast<VkDeviceSize>( hitShaderBindingStride ),
17615  static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
17616  static_cast<VkDeviceSize>( callableShaderBindingOffset ),
17617  static_cast<VkDeviceSize>( callableShaderBindingStride ),
17618  width,
17619  height,
17620  depth );
17621  }
17622 
 // Creates `createInfoCount` NV ray-tracing pipelines in one call (VK_NV_ray_tracing).
 // C-style overload: caller supplies the output array `pPipelines` and an optional allocator;
 // the raw VkResult is returned unchanged (may be ePipelineCompileRequiredEXT etc.).
17623  template <typename Dispatch>
17624  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17625  uint32_t createInfoCount,
17626  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
17627  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17628  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
17629  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17630  {
17631  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17632  return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
17633  static_cast<VkPipelineCache>( pipelineCache ),
17634  createInfoCount,
17635  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
17636  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17637  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
17638  }
17639 
17640 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode batch creation: returns a std::vector<Pipeline> sized from the ArrayProxy.
 // eSuccess and ePipelineCompileRequiredEXT are both accepted as non-throwing results.
 // NOTE(review): listing lines 17642 (return-type/signature), 17645 (presumably the Optional
 // allocator parameter — the body reads `allocator` at 17659) and 17665 (the return statement)
 // are missing from this Doxygen extraction — verify against vulkan_funcs.hpp.
17641  template <typename PipelineAllocator, typename Dispatch>
17643  Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17644  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
17646  Dispatch const & d ) const
17647  {
17648  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17650  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17651 # endif
17652 
 // Output vector is pre-sized so the C API can write directly into its storage.
17653  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
17654  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17655  m_device,
17656  static_cast<VkPipelineCache>( pipelineCache ),
17657  createInfos.size(),
17658  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
17659  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17660  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
17661  resultCheck( result,
17662  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
17663  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17664 
17666  }
17667 
 // Same as above, but the caller provides a PipelineAllocator instance for the result vector;
 // the enable_if constrains this overload to allocators whose value_type is Pipeline.
 // NOTE(review): listing lines 17671, 17674 and 17695 are missing from this extraction
 // (signature/return-type, allocator parameter, and return statement respectively).
17668  template <typename PipelineAllocator,
17669  typename Dispatch,
17670  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
17672  Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17673  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
17675  PipelineAllocator & pipelineAllocator,
17676  Dispatch const & d ) const
17677  {
17678  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17680  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17681 # endif
17682 
17683  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
17684  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17685  m_device,
17686  static_cast<VkPipelineCache>( pipelineCache ),
17687  createInfos.size(),
17688  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
17689  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17690  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
17691  resultCheck( result,
17692  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
17693  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17694 
17696  }
17697 
 // Single-pipeline convenience wrapper: forwards one create info (count hard-wired to 1) and
 // returns ResultValue<Pipeline> so partial-success codes stay observable to the caller.
 // NOTE(review): listing lines 17699 (return-type/signature) and 17702 (presumably the
 // Optional allocator parameter) are missing from this extraction.
17698  template <typename Dispatch>
17700  Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17701  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
17703  Dispatch const & d ) const
17704  {
17705  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17706 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17707  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17708 # endif
17709 
17710  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
17711  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17712  m_device,
17713  static_cast<VkPipelineCache>( pipelineCache ),
17714  1,
17715  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
17716  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17717  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
17718  resultCheck( result,
17719  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
17720  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17721 
17722  return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
17723  }
17724 
17725 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle batch creation: creates pipelines, then wraps each one in a UniqueHandle
 // sharing a single ObjectDestroy deleter (device + allocator + dispatcher).
 // NOTE(review): listing lines 17727 (return-type/signature), 17730 (presumably the Optional
 // allocator parameter — `allocator` is read at 17744/17751) and 17756 (the return statement)
 // are missing from this Doxygen extraction — verify against vulkan_funcs.hpp.
17726  template <typename Dispatch, typename PipelineAllocator>
17728  Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17729  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
17731  Dispatch const & d ) const
17732  {
17733  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17734 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17735  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17736 # endif
17737 
 // Raw handles are created first, then adopted one-by-one into unique handles below.
17738  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
17739  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17740  m_device,
17741  static_cast<VkPipelineCache>( pipelineCache ),
17742  createInfos.size(),
17743  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
17744  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17745  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
17746  resultCheck( result,
17747  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
17748  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17749  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
17750  uniquePipelines.reserve( createInfos.size() );
17751  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
17752  for ( auto const & pipeline : pipelines )
17753  {
17754  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
17755  }
17757  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
17758  }
17759 
 // Variant taking a caller-supplied allocator for the UniqueHandle vector; the enable_if
 // restricts it to allocators of UniqueHandle<Pipeline, Dispatch>.
 // NOTE(review): listing lines 17764, 17767 and 17794 are missing from this extraction
 // (signature/return-type, Optional allocator parameter, and return statement respectively).
17760  template <
17761  typename Dispatch,
17762  typename PipelineAllocator,
17763  typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
17765  Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17766  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
17768  PipelineAllocator & pipelineAllocator,
17769  Dispatch const & d ) const
17770  {
17771  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17772 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17773  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17774 # endif
17775 
17776  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
17777  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17778  m_device,
17779  static_cast<VkPipelineCache>( pipelineCache ),
17780  createInfos.size(),
17781  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
17782  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17783  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
17784  resultCheck( result,
17785  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
17786  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17787  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
17788  uniquePipelines.reserve( createInfos.size() );
17789  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
17790  for ( auto const & pipeline : pipelines )
17791  {
17792  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
17793  }
17795  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
17796  }
17797 
 // Single-pipeline smart-handle wrapper (count hard-wired to 1).
 // NOTE(review): listing lines 17799, 17802, 17822 and 17824 are missing from this extraction
 // (signature/return-type, allocator parameter, and parts of the return expression).
17798  template <typename Dispatch>
17800  Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
17801  const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
17803  Dispatch const & d ) const
17804  {
17805  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17806 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17807  VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" );
17808 # endif
17809 
17810  VULKAN_HPP_NAMESPACE::Pipeline pipeline;
17811  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
17812  m_device,
17813  static_cast<VkPipelineCache>( pipelineCache ),
17814  1,
17815  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
17816  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17817  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
17818  resultCheck( result,
17819  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
17820  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
17821 
17823  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17825  }
17826 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17827 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17828 
 // Queries opaque shader-group handles for `groupCount` groups starting at `firstGroup`
 // of an NV ray-tracing pipeline, writing them into the caller-provided buffer.
17829  template <typename Dispatch>
17830  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
17831  uint32_t firstGroup,
17832  uint32_t groupCount,
17833  size_t dataSize,
17834  void * pData,
17835  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17836  {
17837  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17838  return static_cast<Result>(
17839  d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
17840  }
17841 
17842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload: returns the handles as a std::vector<DataType>; dataSize must be
 // an exact multiple of sizeof(DataType) (asserted below).
 // NOTE(review): listing line 17844 (the return-type/signature line) is missing from this
 // Doxygen extraction — verify against vulkan_funcs.hpp.
17843  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
17845  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
17846  {
17847  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17848 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17849  VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV &&
17850  "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
17851 # endif
17852 
17853  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
17854  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
17855  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV(
17856  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
17857  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
17858 
17859  return createResultValueType( result, data );
17860  }
17861 
 // Single-value convenience overload: fetches exactly sizeof(DataType) bytes into one value.
 // NOTE(review): listing line 17863 (the return-type line) is missing from this extraction.
17862  template <typename DataType, typename Dispatch>
17864  Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
17865  {
17866  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17867 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17868  VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV &&
17869  "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
17870 # endif
17871 
17872  DataType data;
17873  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV(
17874  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
17875  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
17876 
17877  return createResultValueType( result, data );
17878  }
17879 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17880 
 // Queries the opaque device handle of an NV acceleration structure into a caller buffer.
17881  template <typename Dispatch>
17882  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
17883  size_t dataSize,
17884  void * pData,
17885  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17886  {
17887  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17888  return static_cast<Result>(
17889  d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
17890  }
17891 
17892 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload returning std::vector<DataType>; dataSize must divide evenly by
 // sizeof(DataType) (asserted below).
 // NOTE(review): listing line 17894 (the return-type/signature line) is missing from this
 // Doxygen extraction — verify against vulkan_funcs.hpp.
17893  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
17895  Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
17896  {
17897  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17898 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17899  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
17900 # endif
17901 
17902  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
17903  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
17904  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV(
17905  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
17906  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
17907 
17908  return createResultValueType( result, data );
17909  }
17910 
 // Single-value overload: fetches exactly sizeof(DataType) bytes.
 // NOTE(review): listing line 17912 (the return-type line) is missing from this extraction.
17911  template <typename DataType, typename Dispatch>
17913  Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
17914  {
17915  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17916 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17917  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
17918 # endif
17919 
17920  DataType data;
17921  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV(
17922  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
17923  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
17924 
17925  return createResultValueType( result, data );
17926  }
17927 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17928 
 // Records vkCmdWriteAccelerationStructuresPropertiesNV: writes a property query for each of
 // the given acceleration structures into consecutive query-pool slots starting at firstQuery.
 // NOTE(review): listing line 17932 is missing from this Doxygen extraction; the body casts a
 // parameter named `queryType` to VkQueryType (line 17941), so 17932 presumably declared a
 // QueryType parameter — confirm against vulkan_funcs.hpp.
17929  template <typename Dispatch>
17930  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
17931  const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
17933  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
17934  uint32_t firstQuery,
17935  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17936  {
17937  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17938  d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
17939  accelerationStructureCount,
17940  reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
17941  static_cast<VkQueryType>( queryType ),
17942  static_cast<VkQueryPool>( queryPool ),
17943  firstQuery );
17944  }
17945 
17946 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload: count/pointer pair replaced by an ArrayProxy.
 // NOTE(review): listing line 17950 (the queryType parameter line, by the same reasoning as
 // above) is missing from this extraction.
17947  template <typename Dispatch>
17948  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
17949  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
17951  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
17952  uint32_t firstQuery,
17953  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17954  {
17955  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17956 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17957  VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV &&
17958  "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" );
17959 # endif
17960 
17961  d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
17962  accelerationStructures.size(),
17963  reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
17964  static_cast<VkQueryType>( queryType ),
17965  static_cast<VkQueryPool>( queryPool ),
17966  firstQuery );
17967  }
17968 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17969 
17970 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Non-enhanced build: forwards to vkCompileDeferredNV and returns the raw Result.
17971  template <typename Dispatch>
17972  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
17973  uint32_t shader,
17974  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17975  {
17976  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17977  return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
17978  }
17979 #else
 // Enhanced build: same call, but failures go through resultCheck / createResultValueType.
 // NOTE(review): listing line 17989 (presumably the start of the
 // `VULKAN_HPP_NAMESPACE::Result result =` statement continued on 17990) is missing from this
 // Doxygen extraction — verify against vulkan_funcs.hpp.
17980  template <typename Dispatch>
17981  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
17982  Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
17983  {
17984  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17985 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
17986  VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" );
17987 # endif
17988 
17990  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
17991  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
17992 
17993  return createResultValueType( result );
17994  }
17995 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17996 
17997  //=== VK_KHR_maintenance3 ===
17998 
 // Queries whether a descriptor-set layout described by pCreateInfo can be created
 // (VK_KHR_maintenance3 / Vulkan 1.1). C-style overload: result written to pSupport.
17999  template <typename Dispatch>
18000  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
18001  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
18002  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18003  {
18004  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18005  d.vkGetDescriptorSetLayoutSupportKHR(
18006  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
18007  }
18008 
18009 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload: returns the DescriptorSetLayoutSupport struct by value.
18010  template <typename Dispatch>
18011  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
18012  Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
18013  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18014  {
18015  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18016 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18017  VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR &&
18018  "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
18019 # endif
18020 
18021  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
18022  d.vkGetDescriptorSetLayoutSupportKHR(
18023  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
18024 
18025  return support;
18026  }
18027 
 // StructureChain overload: lets the caller receive extension structs chained onto
 // DescriptorSetLayoutSupport; the support struct is filled in place inside the chain.
18028  template <typename X, typename Y, typename... Z, typename Dispatch>
18029  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
18030  Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
18031  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18032  {
18033  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18034 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18035  VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR &&
18036  "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
18037 # endif
18038 
18039  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
18040  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
18041  d.vkGetDescriptorSetLayoutSupportKHR(
18042  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
18043 
18044  return structureChain;
18045  }
18046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18047 
18048  //=== VK_KHR_draw_indirect_count ===
18049 
 // Records vkCmdDrawIndirectCountKHR: indirect draw whose draw count is read from
 // countBuffer at countBufferOffset, clamped to maxDrawCount.
 // NOTE(review): listing line 18052 is missing from this Doxygen extraction; the body casts a
 // parameter named `offset` to VkDeviceSize (line 18062), so 18052 presumably declared
 // `VULKAN_HPP_NAMESPACE::DeviceSize offset,` — confirm against vulkan_funcs.hpp.
18050  template <typename Dispatch>
18051  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
18053  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
18054  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
18055  uint32_t maxDrawCount,
18056  uint32_t stride,
18057  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18058  {
18059  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18060  d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
18061  static_cast<VkBuffer>( buffer ),
18062  static_cast<VkDeviceSize>( offset ),
18063  static_cast<VkBuffer>( countBuffer ),
18064  static_cast<VkDeviceSize>( countBufferOffset ),
18065  maxDrawCount,
18066  stride );
18067  }
18068 
 // Indexed-draw counterpart of the wrapper above.
 // NOTE(review): listing line 18071 (the `offset` parameter line, by the same reasoning) is
 // missing from this extraction.
18069  template <typename Dispatch>
18070  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
18072  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
18073  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
18074  uint32_t maxDrawCount,
18075  uint32_t stride,
18076  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18077  {
18078  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18079  d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
18080  static_cast<VkBuffer>( buffer ),
18081  static_cast<VkDeviceSize>( offset ),
18082  static_cast<VkBuffer>( countBuffer ),
18083  static_cast<VkDeviceSize>( countBufferOffset ),
18084  maxDrawCount,
18085  stride );
18086  }
18087 
18088  //=== VK_EXT_external_memory_host ===
18089 
 // Queries properties (e.g. memory-type bits) of an externally-allocated host pointer
 // (VK_EXT_external_memory_host). C-style overload writing into the caller's struct.
 // NOTE(review): listing line 18091 (the return-type/`Device::` signature line) is missing
 // from this Doxygen extraction — verify against vulkan_funcs.hpp.
18090  template <typename Dispatch>
18092  Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
18093  const void * pHostPointer,
18094  VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
18095  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18096  {
18097  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18098  return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
18099  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
18100  pHostPointer,
18101  reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
18102  }
18103 
18104 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode overload: returns MemoryHostPointerPropertiesEXT via createResultValueType.
 // NOTE(review): listing lines 18106 (return-type line) and 18117 (presumably the start of the
 // `VULKAN_HPP_NAMESPACE::Result result =` statement continued on 18118) are missing from
 // this extraction.
18105  template <typename Dispatch>
18107  Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
18108  const void * pHostPointer,
18109  Dispatch const & d ) const
18110  {
18111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18112 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18113  VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function <vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" );
18114 # endif
18115 
18116  VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
18118  d.vkGetMemoryHostPointerPropertiesEXT( m_device,
18119  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
18120  pHostPointer,
18121  reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
18122  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
18123 
18124  return createResultValueType( result, memoryHostPointerProperties );
18125  }
18126 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18127 
18128  //=== VK_AMD_buffer_marker ===
18129 
18130  template <typename Dispatch>
18131  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
18132  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
18134  uint32_t marker,
18135  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18136  {
18137  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18138  d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
18139  static_cast<VkPipelineStageFlagBits>( pipelineStage ),
18140  static_cast<VkBuffer>( dstBuffer ),
18141  static_cast<VkDeviceSize>( dstOffset ),
18142  marker );
18143  }
18144 
18145  //=== VK_EXT_calibrated_timestamps ===
18146 
18147  template <typename Dispatch>
18148  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
18149  VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,
18150  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18151  {
18152  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18153  return static_cast<Result>(
18154  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
18155  }
18156 
18157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18158  template <typename TimeDomainKHRAllocator, typename Dispatch>
18160  PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
18161  {
18162  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18163 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18164  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
18165  "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
18166 # endif
18167 
18168  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains;
18169  uint32_t timeDomainCount;
18171  do
18172  {
18173  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
18174  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
18175  {
18176  timeDomains.resize( timeDomainCount );
18177  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
18178  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
18179  }
18180  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
18181  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
18182  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
18183  if ( timeDomainCount < timeDomains.size() )
18184  {
18185  timeDomains.resize( timeDomainCount );
18186  }
18187  return createResultValueType( result, timeDomains );
18188  }
18189 
18190  template <typename TimeDomainKHRAllocator,
18191  typename Dispatch,
18192  typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type>
18194  PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const
18195  {
18196  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18197 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18198  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
18199  "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
18200 # endif
18201 
18202  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator );
18203  uint32_t timeDomainCount;
18205  do
18206  {
18207  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
18208  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
18209  {
18210  timeDomains.resize( timeDomainCount );
18211  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
18212  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
18213  }
18214  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
18215  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
18216  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
18217  if ( timeDomainCount < timeDomains.size() )
18218  {
18219  timeDomains.resize( timeDomainCount );
18220  }
18221  return createResultValueType( result, timeDomains );
18222  }
18223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18224 
18225  template <typename Dispatch>
18226  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
18227  const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,
18228  uint64_t * pTimestamps,
18229  uint64_t * pMaxDeviation,
18230  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18231  {
18232  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18233  return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
18234  m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
18235  }
18236 
18237 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18238  template <typename Uint64_tAllocator, typename Dispatch>
18240  Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
18241  Dispatch const & d ) const
18242  {
18243  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18244 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18245  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT &&
18246  "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
18247 # endif
18248 
18249  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
18250  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
18251  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
18252  uint64_t & maxDeviation = data_.second;
18253  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT(
18254  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
18255  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
18256 
18257  return createResultValueType( result, data_ );
18258  }
18259 
18260  template <typename Uint64_tAllocator,
18261  typename Dispatch,
18262  typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type>
18264  Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
18265  Uint64_tAllocator & uint64_tAllocator,
18266  Dispatch const & d ) const
18267  {
18268  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18269 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18270  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT &&
18271  "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
18272 # endif
18273 
18274  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
18275  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
18276  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
18277  uint64_t & maxDeviation = data_.second;
18278  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT(
18279  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
18280  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
18281 
18282  return createResultValueType( result, data_ );
18283  }
18284 
18285  template <typename Dispatch>
18287  Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const
18288  {
18289  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18290 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18291  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT &&
18292  "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
18293 # endif
18294 
18295  std::pair<uint64_t, uint64_t> data_;
18296  uint64_t & timestamp = data_.first;
18297  uint64_t & maxDeviation = data_.second;
18299  d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) );
18300  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
18301 
18302  return createResultValueType( result, data_ );
18303  }
18304 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18305 
18306  //=== VK_NV_mesh_shader ===
18307 
18308  template <typename Dispatch>
18309  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18310  {
18311  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18312  d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
18313  }
18314 
18315  template <typename Dispatch>
18316  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
18318  uint32_t drawCount,
18319  uint32_t stride,
18320  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18321  {
18322  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18323  d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
18324  }
18325 
18326  template <typename Dispatch>
18327  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
18329  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
18330  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
18331  uint32_t maxDrawCount,
18332  uint32_t stride,
18333  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18334  {
18335  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18336  d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
18337  static_cast<VkBuffer>( buffer ),
18338  static_cast<VkDeviceSize>( offset ),
18339  static_cast<VkBuffer>( countBuffer ),
18340  static_cast<VkDeviceSize>( countBufferOffset ),
18341  maxDrawCount,
18342  stride );
18343  }
18344 
18345  //=== VK_NV_scissor_exclusive ===
18346 
18347  template <typename Dispatch>
18348  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
18349  uint32_t exclusiveScissorCount,
18350  const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,
18351  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18352  {
18353  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18354  d.vkCmdSetExclusiveScissorEnableNV(
18355  m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
18356  }
18357 
18358 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18359  template <typename Dispatch>
18360  VULKAN_HPP_INLINE void
18361  CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
18362  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,
18363  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18364  {
18365  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18367  VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" );
18368 # endif
18369 
18370  d.vkCmdSetExclusiveScissorEnableNV(
18371  m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
18372  }
18373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18374 
18375  template <typename Dispatch>
18376  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
18377  uint32_t exclusiveScissorCount,
18378  const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
18379  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18380  {
18381  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18382  d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
18383  }
18384 
18385 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18386  template <typename Dispatch>
18387  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
18388  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
18389  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18390  {
18391  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18392 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18393  VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" );
18394 # endif
18395 
18396  d.vkCmdSetExclusiveScissorNV(
18397  m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
18398  }
18399 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18400 
18401  //=== VK_NV_device_diagnostic_checkpoints ===
18402 
18403  template <typename Dispatch>
18404  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18405  {
18406  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18407  d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
18408  }
18409 
18410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18411  template <typename CheckpointMarkerType, typename Dispatch>
18412  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18413  {
18414  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18415 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18416  VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" );
18417 # endif
18418 
18419  d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
18420  }
18421 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18422 
18423  template <typename Dispatch>
18424  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
18425  VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
18426  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18427  {
18428  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18429  d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
18430  }
18431 
18432 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18433  template <typename CheckpointDataNVAllocator, typename Dispatch>
18434  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
18435  Queue::getCheckpointDataNV( Dispatch const & d ) const
18436  {
18437  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18438 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18439  VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" );
18440 # endif
18441 
18442  std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
18443  uint32_t checkpointDataCount;
18444  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
18445  checkpointData.resize( checkpointDataCount );
18446  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
18447 
18448  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
18449  if ( checkpointDataCount < checkpointData.size() )
18450  {
18451  checkpointData.resize( checkpointDataCount );
18452  }
18453  return checkpointData;
18454  }
18455 
18456  template <typename CheckpointDataNVAllocator,
18457  typename Dispatch,
18458  typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type>
18459  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
18460  Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
18461  {
18462  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18463 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18464  VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" );
18465 # endif
18466 
18467  std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
18468  uint32_t checkpointDataCount;
18469  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
18470  checkpointData.resize( checkpointDataCount );
18471  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
18472 
18473  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
18474  if ( checkpointDataCount < checkpointData.size() )
18475  {
18476  checkpointData.resize( checkpointDataCount );
18477  }
18478  return checkpointData;
18479  }
18480 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18481 
18482  //=== VK_KHR_timeline_semaphore ===
18483 
18484  template <typename Dispatch>
18485  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
18486  uint64_t * pValue,
18487  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18488  {
18489  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18490  return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
18491  }
18492 
18493 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18494  template <typename Dispatch>
18496  Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
18497  {
18498  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18499 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18500  VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
18501 # endif
18502 
18503  uint64_t value;
18505  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
18506  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
18507 
18508  return createResultValueType( result, value );
18509  }
18510 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18511 
18512  template <typename Dispatch>
18513  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
18514  uint64_t timeout,
18515  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18516  {
18517  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18518  return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
18519  }
18520 
18521 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18522  template <typename Dispatch>
18524  Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
18525  {
18526  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18527 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18528  VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
18529 # endif
18530 
18532  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
18533  resultCheck(
18534  result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
18535 
18536  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
18537  }
18538 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18539 
18540  template <typename Dispatch>
18541  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
18542  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18543  {
18544  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18545  return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
18546  }
18547 
18548 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18549  template <typename Dispatch>
18551  Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
18552  {
18553  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18554 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18555  VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
18556 # endif
18557 
18559  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
18560  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
18561 
18562  return createResultValueType( result );
18563  }
18564 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18565 
18566  //=== VK_INTEL_performance_query ===
18567 
18568  template <typename Dispatch>
18569  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
18570  const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18571  {
18572  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18573  return static_cast<Result>(
18574  d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
18575  }
18576 
18577 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18578  template <typename Dispatch>
18580  Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
18581  {
18582  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18583 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18584  VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" );
18585 # endif
18586 
18588  d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
18589  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
18590 
18591  return createResultValueType( result );
18592  }
18593 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18594 
18595  template <typename Dispatch>
18596  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18597  {
18598  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18599  d.vkUninitializePerformanceApiINTEL( m_device );
18600  }
18601 
18602  template <typename Dispatch>
18603  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
18604  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18605  {
18606  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18607  return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
18608  }
18609 
18610 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18611  template <typename Dispatch>
18613  CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
18614  {
18615  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18617  VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" );
18618 # endif
18619 
18621  d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
18622  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
18623 
18624  return createResultValueType( result );
18625  }
18626 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18627 
18628  template <typename Dispatch>
18629  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
18630  const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18631  {
18632  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18633  return static_cast<Result>(
18634  d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
18635  }
18636 
18637 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18638  template <typename Dispatch>
18640  CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
18641  {
18642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18644  VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" );
18645 # endif
18646 
18648  d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
18649  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
18650 
18651  return createResultValueType( result );
18652  }
18653 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18654 
18655  template <typename Dispatch>
18656  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
18657  const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18658  {
18659  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18660  return static_cast<Result>(
18661  d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
18662  }
18663 
18664 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18665  template <typename Dispatch>
18667  CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
18668  {
18669  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18670 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18671  VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" );
18672 # endif
18673 
18675  d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
18676  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
18677 
18678  return createResultValueType( result );
18679  }
18680 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18681 
18682  template <typename Dispatch>
18684  Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
18685  VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
18686  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18687  {
18688  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18689  return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
18690  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
18691  reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
18692  }
18693 
18694 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18695  template <typename Dispatch>
18697  Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
18698  {
18699  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18700 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18701  VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
18702 # endif
18703 
18704  VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
18706  d.vkAcquirePerformanceConfigurationINTEL( m_device,
18707  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
18708  reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
18709  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
18710 
18711  return createResultValueType( result, configuration );
18712  }
18713 
18714 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18715  template <typename Dispatch>
18717  Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
18718  Dispatch const & d ) const
18719  {
18720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18721 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18722  VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
18723 # endif
18724 
18725  VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
18727  d.vkAcquirePerformanceConfigurationINTEL( m_device,
18728  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
18729  reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
18730  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
18731 
18732  return createResultValueType(
18734  }
18735 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
18736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18737 
18738 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18739  template <typename Dispatch>
18740  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
18741  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18742  {
18743  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18744  return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18745  }
18746 #else
18747  template <typename Dispatch>
18748  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
18749  Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
18750  {
18751  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18752 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18753  VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
18754 # endif
18755 
18757  d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18758  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
18759 
18760  return createResultValueType( result );
18761  }
18762 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18763 
18764 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18765  template <typename Dispatch>
18766  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
18767  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18768  {
18769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18770  return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18771  }
18772 #else
18773  template <typename Dispatch>
18774  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
18775  Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
18776  {
18777  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18778 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18779  VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
18780 # endif
18781 
18783  d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18784  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
18785 
18786  return createResultValueType( result );
18787  }
18788 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18789 
18790 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18791  template <typename Dispatch>
18792  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
18793  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18794  {
18795  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18796  return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18797  }
18798 #else
18799  template <typename Dispatch>
18800  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
18801  Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
18802  {
18803  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18804 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18805  VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL &&
18806  "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
18807 # endif
18808 
18810  d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
18811  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
18812 
18813  return createResultValueType( result );
18814  }
18815 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18816 
18817  template <typename Dispatch>
18819  VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
18820  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18821  {
18822  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18823  return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
18824  m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
18825  }
18826 
18827 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18828  template <typename Dispatch>
18830  Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
18831  {
18832  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18833 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18834  VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" );
18835 # endif
18836 
18837  VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
18838  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPerformanceParameterINTEL(
18839  m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
18840  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
18841 
18842  return createResultValueType( result, value );
18843  }
18844 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18845 
18846  //=== VK_AMD_display_native_hdr ===
18847 
18848  template <typename Dispatch>
18849  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
18850  VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
18851  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18852  {
18853  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18854  d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
18855  }
18856 
18857 #if defined( VK_USE_PLATFORM_FUCHSIA )
18858  //=== VK_FUCHSIA_imagepipe_surface ===
18859 
18860  template <typename Dispatch>
18862  Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
18863  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
18864  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
18865  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18866  {
18867  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18868  return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
18869  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
18870  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18871  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
18872  }
18873 
18874 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18875  template <typename Dispatch>
18876  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
18877  Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
18878  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18879  Dispatch const & d ) const
18880  {
18881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18882 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18883  VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" );
18884 # endif
18885 
18886  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
18887  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
18888  m_instance,
18889  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
18890  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18891  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
18892  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
18893 
18894  return createResultValueType( result, surface );
18895  }
18896 
18897 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18898  template <typename Dispatch>
18899  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
18900  Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
18901  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18902  Dispatch const & d ) const
18903  {
18904  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18905 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18906  VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" );
18907 # endif
18908 
18909  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
18910  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
18911  m_instance,
18912  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
18913  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18914  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
18915  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
18916 
18917  return createResultValueType(
18918  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
18919  }
18920 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
18921 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18922 #endif /*VK_USE_PLATFORM_FUCHSIA*/
18923 
18924 #if defined( VK_USE_PLATFORM_METAL_EXT )
18925  //=== VK_EXT_metal_surface ===
18926 
18927  template <typename Dispatch>
18928  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
18929  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
18930  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
18931  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18932  {
18933  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18934  return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
18935  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
18936  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18937  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
18938  }
18939 
18940 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18941  template <typename Dispatch>
18942  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
18943  Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
18944  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18945  Dispatch const & d ) const
18946  {
18947  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18948 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18949  VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" );
18950 # endif
18951 
18952  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
18954  d.vkCreateMetalSurfaceEXT( m_instance,
18955  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
18956  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18957  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
18958  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
18959 
18960  return createResultValueType( result, surface );
18961  }
18962 
18963 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18964  template <typename Dispatch>
18965  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
18966  Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
18967  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18968  Dispatch const & d ) const
18969  {
18970  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18971 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
18972  VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" );
18973 # endif
18974 
18975  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
18977  d.vkCreateMetalSurfaceEXT( m_instance,
18978  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
18979  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18980  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
18981  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
18982 
18983  return createResultValueType(
18984  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
18985  }
18986 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
18987 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18988 #endif /*VK_USE_PLATFORM_METAL_EXT*/
18989 
18990  //=== VK_KHR_fragment_shading_rate ===
18991 
18992  template <typename Dispatch>
18994  PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
18995  VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
18996  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18997  {
18998  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18999  return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
19000  m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
19001  }
19002 
19003 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19004  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
19007  PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
19008  {
19009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19010 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19011  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR &&
19012  "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
19013 # endif
19014 
19015  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
19016  uint32_t fragmentShadingRateCount;
19018  do
19019  {
19020  result =
19021  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
19022  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
19023  {
19024  fragmentShadingRates.resize( fragmentShadingRateCount );
19025  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
19026  m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
19027  }
19028  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19029  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
19030  VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
19031  if ( fragmentShadingRateCount < fragmentShadingRates.size() )
19032  {
19033  fragmentShadingRates.resize( fragmentShadingRateCount );
19034  }
19035  return createResultValueType( result, fragmentShadingRates );
19036  }
19037 
19038  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
19039  typename Dispatch,
19040  typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type,
19041  VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
19042  int>::type>
19045  PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
19046  Dispatch const & d ) const
19047  {
19048  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19050  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR &&
19051  "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
19052 # endif
19053 
19054  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
19055  physicalDeviceFragmentShadingRateKHRAllocator );
19056  uint32_t fragmentShadingRateCount;
19058  do
19059  {
19060  result =
19061  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
19062  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
19063  {
19064  fragmentShadingRates.resize( fragmentShadingRateCount );
19065  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
19066  m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
19067  }
19068  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19069  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
19070  VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
19071  if ( fragmentShadingRateCount < fragmentShadingRates.size() )
19072  {
19073  fragmentShadingRates.resize( fragmentShadingRateCount );
19074  }
19075  return createResultValueType( result, fragmentShadingRates );
19076  }
19077 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19078 
19079  template <typename Dispatch>
19080  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
19082  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19083  {
19084  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19085  d.vkCmdSetFragmentShadingRateKHR(
19086  m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
19087  }
19088 
19089 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19090  template <typename Dispatch>
19091  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
19093  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19094  {
19095  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19096 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19097  VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" );
19098 # endif
19099 
19100  d.vkCmdSetFragmentShadingRateKHR(
19101  m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
19102  }
19103 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19104 
19105  //=== VK_EXT_buffer_device_address ===
19106 
19107  template <typename Dispatch>
19108  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
19109  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19110  {
19111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19112  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
19113  }
19114 
19115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19116  template <typename Dispatch>
19117  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
19118  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19119  {
19120  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19121 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19122  VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT &&
19123  "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
19124 # endif
19125 
19126  VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
19127 
19128  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
19129  }
19130 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19131 
19132  //=== VK_EXT_tooling_info ===
19133 
19134  template <typename Dispatch>
19135  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
19136  VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
19137  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19138  {
19139  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19140  return static_cast<Result>(
19141  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
19142  }
19143 
19144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19145  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
19148  PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
19149  {
19150  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19151 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19152  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT &&
19153  "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
19154 # endif
19155 
19156  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
19157  uint32_t toolCount;
19159  do
19160  {
19161  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
19162  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
19163  {
19164  toolProperties.resize( toolCount );
19165  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19166  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
19167  }
19168  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19169  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
19170  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
19171  if ( toolCount < toolProperties.size() )
19172  {
19173  toolProperties.resize( toolCount );
19174  }
19175  return createResultValueType( result, toolProperties );
19176  }
19177 
19178  template <
19179  typename PhysicalDeviceToolPropertiesAllocator,
19180  typename Dispatch,
19181  typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
19182  int>::type>
19185  PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
19186  {
19187  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19188 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19189  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT &&
19190  "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
19191 # endif
19192 
19193  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
19194  physicalDeviceToolPropertiesAllocator );
19195  uint32_t toolCount;
19197  do
19198  {
19199  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
19200  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
19201  {
19202  toolProperties.resize( toolCount );
19203  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19204  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
19205  }
19206  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19207  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
19208  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
19209  if ( toolCount < toolProperties.size() )
19210  {
19211  toolProperties.resize( toolCount );
19212  }
19213  return createResultValueType( result, toolProperties );
19214  }
19215 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19216 
19217  //=== VK_KHR_present_wait ===
19218 
19219 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19220  template <typename Dispatch>
19221  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
19222  uint64_t presentId,
19223  uint64_t timeout,
19224  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19225  {
19226  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19227  return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
19228  }
19229 #else
19230  template <typename Dispatch>
19232  Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
19233  {
19234  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19236  VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" );
19237 # endif
19238 
19240  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
19241  resultCheck( result,
19242  VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
19243  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
19244 
19245  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
19246  }
19247 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19248 
19249  //=== VK_NV_cooperative_matrix ===
19250 
19251  template <typename Dispatch>
19252  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
19253  uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19254  {
19255  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19256  return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
19257  m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
19258  }
19259 
19260 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19261  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
19264  PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
19265  {
19266  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19267 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19268  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
19269  "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" );
19270 # endif
19271 
19272  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
19273  uint32_t propertyCount;
19275  do
19276  {
19277  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
19278  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
19279  {
19280  properties.resize( propertyCount );
19281  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
19282  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
19283  }
19284  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19285  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
19286  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
19287  if ( propertyCount < properties.size() )
19288  {
19289  properties.resize( propertyCount );
19290  }
19291  return createResultValueType( result, properties );
19292  }
19293 
19294  template <typename CooperativeMatrixPropertiesNVAllocator,
19295  typename Dispatch,
19296  typename std::enable_if<
19297  std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value,
19298  int>::type>
19301  PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
19302  Dispatch const & d ) const
19303  {
19304  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19305 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19306  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
19307  "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" );
19308 # endif
19309 
19310  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
19311  cooperativeMatrixPropertiesNVAllocator );
19312  uint32_t propertyCount;
19314  do
19315  {
19316  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
19317  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
19318  {
19319  properties.resize( propertyCount );
19320  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
19321  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
19322  }
19323  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19324  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
19325  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
19326  if ( propertyCount < properties.size() )
19327  {
19328  properties.resize( propertyCount );
19329  }
19330  return createResultValueType( result, properties );
19331  }
19332 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19333 
19334  //=== VK_NV_coverage_reduction_mode ===
19335 
19336  template <typename Dispatch>
19337  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
19338  uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19339  {
19340  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19341  return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
19342  m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
19343  }
19344 
19345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19346  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
19349  PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
19350  {
19351  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19352 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19353  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV &&
19354  "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" );
19355 # endif
19356 
19357  std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
19358  uint32_t combinationCount;
19360  do
19361  {
19362  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19363  d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
19364  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount )
19365  {
19366  combinations.resize( combinationCount );
19367  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
19368  m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
19369  }
19370  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19371  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
19372  VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
19373  if ( combinationCount < combinations.size() )
19374  {
19375  combinations.resize( combinationCount );
19376  }
19377  return createResultValueType( result, combinations );
19378  }
19379 
19380  template <typename FramebufferMixedSamplesCombinationNVAllocator,
19381  typename Dispatch,
19382  typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type,
19383  VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
19384  int>::type>
19387  PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
19388  FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
19389  {
19390  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19391 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19392  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV &&
19393  "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" );
19394 # endif
19395 
19396  std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
19397  framebufferMixedSamplesCombinationNVAllocator );
19398  uint32_t combinationCount;
19400  do
19401  {
19402  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19403  d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
19404  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount )
19405  {
19406  combinations.resize( combinationCount );
19407  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
19408  m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
19409  }
19410  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19411  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
19412  VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
19413  if ( combinationCount < combinations.size() )
19414  {
19415  combinations.resize( combinationCount );
19416  }
19417  return createResultValueType( result, combinations );
19418  }
19419 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19420 
19421 #if defined( VK_USE_PLATFORM_WIN32_KHR )
19422  //=== VK_EXT_full_screen_exclusive ===
19423 
19424  template <typename Dispatch>
19426  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
19427  uint32_t * pPresentModeCount,
19428  VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
19429  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19430  {
19431  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19432  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
19433  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
19434  pPresentModeCount,
19435  reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
19436  }
19437 
19438 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19439  template <typename PresentModeKHRAllocator, typename Dispatch>
19440  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
19441  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
19442  {
19443  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19444 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19445  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT &&
19446  "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
19447 # endif
19448 
19449  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
19450  uint32_t presentModeCount;
19452  do
19453  {
19454  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
19455  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) );
19456  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
19457  {
19458  presentModes.resize( presentModeCount );
19459  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19460  d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
19461  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
19462  &presentModeCount,
19463  reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
19464  }
19465  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19466  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
19467  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
19468  if ( presentModeCount < presentModes.size() )
19469  {
19470  presentModes.resize( presentModeCount );
19471  }
19472  return createResultValueType( result, presentModes );
19473  }
19474 
19475  template <typename PresentModeKHRAllocator,
19476  typename Dispatch,
19477  typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type>
19478  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
19479  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
19480  PresentModeKHRAllocator & presentModeKHRAllocator,
19481  Dispatch const & d ) const
19482  {
19483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19484 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19485  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT &&
19486  "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
19487 # endif
19488 
19489  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
19490  uint32_t presentModeCount;
19492  do
19493  {
19494  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
19495  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) );
19496  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
19497  {
19498  presentModes.resize( presentModeCount );
19499  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
19500  d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
19501  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
19502  &presentModeCount,
19503  reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
19504  }
19505  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
19506  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
19507  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
19508  if ( presentModeCount < presentModes.size() )
19509  {
19510  presentModes.resize( presentModeCount );
19511  }
19512  return createResultValueType( result, presentModes );
19513  }
19514 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19515 
19516 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19517  template <typename Dispatch>
19518  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
19519  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19520  {
19521  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19522  return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
19523  }
19524 # else
19525  template <typename Dispatch>
19526  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
19527  Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
19528  {
19529  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19530 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19531  VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
19532 # endif
19533 
19535  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
19536  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
19537 
19538  return createResultValueType( result );
19539  }
19540 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19541 
19542 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19543  template <typename Dispatch>
19544  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
19545  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19546  {
19547  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19548  return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
19549  }
19550 # else
19551  template <typename Dispatch>
19552  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
19553  Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
19554  {
19555  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19556 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19557  VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
19558 # endif
19559 
19561  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
19562  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
19563 
19564  return createResultValueType( result );
19565  }
19566 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19567 
19568  template <typename Dispatch>
19570  Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
19572  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19573  {
19574  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19575  return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
19576  m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
19577  }
19578 
19579 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19580  template <typename Dispatch>
19581  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
19582  Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
19583  {
19584  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19585 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19586  VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT &&
19587  "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
19588 # endif
19589 
19591  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
19592  m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
19593  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
19594 
19595  return createResultValueType( result, modes );
19596  }
19597 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19598 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
19599 
19600  //=== VK_EXT_headless_surface ===
19601 
19602  template <typename Dispatch>
19603  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
19604  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
19605  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
19606  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19607  {
19608  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19609  return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
19610  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
19611  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19612  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
19613  }
19614 
19615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19616  template <typename Dispatch>
19618  Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
19620  Dispatch const & d ) const
19621  {
19622  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19623 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19624  VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" );
19625 # endif
19626 
19627  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
19628  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT(
19629  m_instance,
19630  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
19631  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19632  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
19633  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
19634 
19635  return createResultValueType( result, surface );
19636  }
19637 
19638 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19639  template <typename Dispatch>
19641  Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
19643  Dispatch const & d ) const
19644  {
19645  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19646 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19647  VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" );
19648 # endif
19649 
19650  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
19651  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT(
19652  m_instance,
19653  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
19654  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19655  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
19656  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
19657 
19658  return createResultValueType(
19660  }
19661 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
19662 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19663 
19664  //=== VK_KHR_buffer_device_address ===
19665 
19666  template <typename Dispatch>
19667  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
19668  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19669  {
19670  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19671  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
19672  }
19673 
19674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19675  template <typename Dispatch>
19676  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
19677  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19678  {
19679  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19680 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19681  VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR &&
19682  "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
19683 # endif
19684 
19685  VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
19686 
19687  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
19688  }
19689 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19690 
19691  template <typename Dispatch>
19692  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
19693  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19694  {
19695  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19696  return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
19697  }
19698 
19699 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19700  template <typename Dispatch>
19701  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
19702  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19703  {
19704  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19705 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19706  VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR &&
19707  "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
19708 # endif
19709 
19710  uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
19711 
19712  return result;
19713  }
19714 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19715 
19716  template <typename Dispatch>
19717  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
19718  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19719  {
19720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19721  return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
19722  }
19723 
19724 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19725  template <typename Dispatch>
19726  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
19727  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19728  {
19729  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19730 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19731  VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
19732  "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
19733 # endif
19734 
19735  uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
19736 
19737  return result;
19738  }
19739 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19740 
19741  //=== VK_EXT_line_rasterization ===
19742 
19743  template <typename Dispatch>
19744  VULKAN_HPP_INLINE void
19745  CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19746  {
19747  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19748  d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
19749  }
19750 
19751  //=== VK_EXT_host_query_reset ===
19752 
19753  template <typename Dispatch>
19754  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
19755  uint32_t firstQuery,
19756  uint32_t queryCount,
19757  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19758  {
19759  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19760  d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
19761  }
19762 
19763  //=== VK_EXT_extended_dynamic_state ===
19764 
19765  template <typename Dispatch>
19766  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19767  {
19768  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19769  d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
19770  }
19771 
19772  template <typename Dispatch>
19773  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19774  {
19775  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19776  d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
19777  }
19778 
19779  template <typename Dispatch>
19780  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
19781  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19782  {
19783  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19784  d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
19785  }
19786 
19787  template <typename Dispatch>
19788  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
19789  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
19790  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19791  {
19792  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19793  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
19794  }
19795 
19796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19797  template <typename Dispatch>
19798  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
19799  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19800  {
19801  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19802 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19803  VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT &&
19804  "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
19805 # endif
19806 
19807  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
19808  }
19809 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19810 
19811  template <typename Dispatch>
19812  VULKAN_HPP_INLINE void
19813  CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19814  {
19815  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19816  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
19817  }
19818 
19819 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19820  template <typename Dispatch>
19821  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
19822  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19823  {
19824  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19825 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19826  VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT &&
19827  "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
19828 # endif
19829 
19830  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
19831  }
19832 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19833 
19834  template <typename Dispatch>
19835  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
19836  uint32_t bindingCount,
19837  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
19838  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
19839  const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
19840  const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
19841  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19842  {
19843  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19844  d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
19845  firstBinding,
19846  bindingCount,
19847  reinterpret_cast<const VkBuffer *>( pBuffers ),
19848  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
19849  reinterpret_cast<const VkDeviceSize *>( pSizes ),
19850  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
19851  }
19852 
19853 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload of bindVertexBuffers2EXT.  Validates that 'offsets' matches 'buffers' in
  // size, and that 'sizes' / 'strides', when non-empty, do too.  With exceptions disabled the
  // checks degrade to asserts; otherwise a mismatch throws LogicError before anything is recorded.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT &&
                       "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# endif
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    // No-exceptions build: size mismatches are programming errors caught by assert only.
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    // Empty 'sizes'/'strides' proxies yield nullptr data pointers, which the C API accepts.
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
19894 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19895 
19896  template <typename Dispatch>
19897  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19898  {
19899  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19900  d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
19901  }
19902 
19903  template <typename Dispatch>
19904  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19905  {
19906  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19907  d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
19908  }
19909 
19910  template <typename Dispatch>
19911  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19912  {
19913  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19914  d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
19915  }
19916 
19917  template <typename Dispatch>
19918  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
19919  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19920  {
19921  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19922  d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
19923  }
19924 
19925  template <typename Dispatch>
19926  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19927  {
19928  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19929  d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
19930  }
19931 
19932  template <typename Dispatch>
19933  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
19936  VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
19938  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19939  {
19940  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19941  d.vkCmdSetStencilOpEXT( m_commandBuffer,
19942  static_cast<VkStencilFaceFlags>( faceMask ),
19943  static_cast<VkStencilOp>( failOp ),
19944  static_cast<VkStencilOp>( passOp ),
19945  static_cast<VkStencilOp>( depthFailOp ),
19946  static_cast<VkCompareOp>( compareOp ) );
19947  }
19948 
19949  //=== VK_KHR_deferred_host_operations ===
19950 
19951  template <typename Dispatch>
19952  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
19953  VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
19954  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19955  {
19956  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19957  return static_cast<Result>( d.vkCreateDeferredOperationKHR(
19958  m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
19959  }
19960 
19961 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19962  template <typename Dispatch>
19964  Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
19965  {
19966  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19967 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19968  VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
19969 # endif
19970 
19971  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
19972  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR(
19973  m_device,
19974  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19975  reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
19976  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
19977 
19978  return createResultValueType( result, deferredOperation );
19979  }
19980 
19981 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19982  template <typename Dispatch>
19984  Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
19985  {
19986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19987 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
19988  VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
19989 # endif
19990 
19991  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
19992  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR(
19993  m_device,
19994  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19995  reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
19996  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
19997 
19998  return createResultValueType(
19999  result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
20000  }
20001 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
20002 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20003 
20004  template <typename Dispatch>
20005  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20006  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20007  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20008  {
20009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20010  d.vkDestroyDeferredOperationKHR(
20011  m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20012  }
20013 
20014 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20015  template <typename Dispatch>
20016  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20018  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20019  {
20020  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20021 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20022  VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
20023 # endif
20024 
20025  d.vkDestroyDeferredOperationKHR(
20026  m_device,
20027  static_cast<VkDeferredOperationKHR>( operation ),
20028  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20029  }
20030 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20031 
20032  template <typename Dispatch>
20033  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20034  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20035  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20036  {
20037  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20038  d.vkDestroyDeferredOperationKHR(
20039  m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20040  }
20041 
20042 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20043  template <typename Dispatch>
20044  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20046  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20047  {
20048  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20049 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20050  VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" );
20051 # endif
20052 
20053  d.vkDestroyDeferredOperationKHR(
20054  m_device,
20055  static_cast<VkDeferredOperationKHR>( operation ),
20056  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20057  }
20058 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20059 
20060  template <typename Dispatch>
20061  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20062  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20063  {
20064  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20065  return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
20066  }
20067 
20068 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20069  template <typename Dispatch>
20070  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20071  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20072  {
20073  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20074  return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
20075  }
20076 #else
20077  template <typename Dispatch>
20079  Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20080  {
20081  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20082 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20083  VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" );
20084 # endif
20085 
20087  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
20088 
20089  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20090  }
20091 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20092 
20093 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20094  template <typename Dispatch>
20095  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20096  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20097  {
20098  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20099  return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
20100  }
20101 #else
20102  template <typename Dispatch>
20103  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
20104  Dispatch const & d ) const
20105  {
20106  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20107 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20108  VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" );
20109 # endif
20110 
20112  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
20113  resultCheck( result,
20114  VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
20115  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
20116 
20117  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20118  }
20119 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20120 
20121  //=== VK_KHR_pipeline_executable_properties ===
20122 
20123  template <typename Dispatch>
20124  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
20125  uint32_t * pExecutableCount,
20126  VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
20127  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20128  {
20129  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20130  return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
20131  reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
20132  pExecutableCount,
20133  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
20134  }
20135 
20136 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20137  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
20140  Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
20141  {
20142  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20143 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20144  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR &&
20145  "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
20146 # endif
20147 
20148  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
20149  uint32_t executableCount;
20151  do
20152  {
20153  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20154  d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
20155  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
20156  {
20157  properties.resize( executableCount );
20158  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20159  d.vkGetPipelineExecutablePropertiesKHR( m_device,
20160  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
20161  &executableCount,
20162  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
20163  }
20164  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20165  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
20166  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
20167  if ( executableCount < properties.size() )
20168  {
20169  properties.resize( executableCount );
20170  }
20171  return createResultValueType( result, properties );
20172  }
20173 
20174  template <typename PipelineExecutablePropertiesKHRAllocator,
20175  typename Dispatch,
20176  typename std::enable_if<
20177  std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
20178  int>::type>
20181  Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
20182  PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
20183  Dispatch const & d ) const
20184  {
20185  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20186 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20187  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR &&
20188  "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
20189 # endif
20190 
20191  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
20192  pipelineExecutablePropertiesKHRAllocator );
20193  uint32_t executableCount;
20195  do
20196  {
20197  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20198  d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
20199  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
20200  {
20201  properties.resize( executableCount );
20202  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20203  d.vkGetPipelineExecutablePropertiesKHR( m_device,
20204  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
20205  &executableCount,
20206  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
20207  }
20208  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20209  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
20210  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
20211  if ( executableCount < properties.size() )
20212  {
20213  properties.resize( executableCount );
20214  }
20215  return createResultValueType( result, properties );
20216  }
20217 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20218 
20219  template <typename Dispatch>
20221  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
20222  uint32_t * pStatisticCount,
20223  VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
20224  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20225  {
20226  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20227  return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
20228  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
20229  pStatisticCount,
20230  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
20231  }
20232 
20233 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20234  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
20237  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
20238  {
20239  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20240 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20241  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR &&
20242  "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
20243 # endif
20244 
20245  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
20246  uint32_t statisticCount;
20248  do
20249  {
20250  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR(
20251  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
20252  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
20253  {
20254  statistics.resize( statisticCount );
20255  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20256  d.vkGetPipelineExecutableStatisticsKHR( m_device,
20257  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
20258  &statisticCount,
20259  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
20260  }
20261  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20262  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
20263  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
20264  if ( statisticCount < statistics.size() )
20265  {
20266  statistics.resize( statisticCount );
20267  }
20268  return createResultValueType( result, statistics );
20269  }
20270 
20271  template <typename PipelineExecutableStatisticKHRAllocator,
20272  typename Dispatch,
20273  typename std::enable_if<
20274  std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
20275  int>::type>
20278  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
20279  PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
20280  Dispatch const & d ) const
20281  {
20282  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20283 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20284  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR &&
20285  "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
20286 # endif
20287 
20288  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
20289  pipelineExecutableStatisticKHRAllocator );
20290  uint32_t statisticCount;
20292  do
20293  {
20294  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR(
20295  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
20296  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
20297  {
20298  statistics.resize( statisticCount );
20299  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
20300  d.vkGetPipelineExecutableStatisticsKHR( m_device,
20301  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
20302  &statisticCount,
20303  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
20304  }
20305  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20306  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
20307  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
20308  if ( statisticCount < statistics.size() )
20309  {
20310  statistics.resize( statisticCount );
20311  }
20312  return createResultValueType( result, statistics );
20313  }
20314 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20315 
20316  template <typename Dispatch>
20318  Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
20319  uint32_t * pInternalRepresentationCount,
20320  VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
20321  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20322  {
20323  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20324  return static_cast<Result>(
20325  d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
20326  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
20327  pInternalRepresentationCount,
20328  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
20329  }
20330 
20331 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20332  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
20334  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
20335  Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
20336  {
20337  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20338 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20339  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR &&
20340  "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
20341 # endif
20342 
20343  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
20344  internalRepresentations;
20345  uint32_t internalRepresentationCount;
20347  do
20348  {
20349  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
20350  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
20351  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
20352  {
20353  internalRepresentations.resize( internalRepresentationCount );
20354  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
20355  m_device,
20356  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
20357  &internalRepresentationCount,
20358  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
20359  }
20360  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20361  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
20362  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
20363  if ( internalRepresentationCount < internalRepresentations.size() )
20364  {
20365  internalRepresentations.resize( internalRepresentationCount );
20366  }
20367  return createResultValueType( result, internalRepresentations );
20368  }
20369 
20370  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
20371  typename Dispatch,
20372  typename std::enable_if<std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type,
20373  VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
20374  int>::type>
20376  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
20377  Device::getPipelineExecutableInternalRepresentationsKHR(
20378  const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
20379  PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
20380  Dispatch const & d ) const
20381  {
20382  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20383 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20384  VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR &&
20385  "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
20386 # endif
20387 
20388  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
20389  internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
20390  uint32_t internalRepresentationCount;
20392  do
20393  {
20394  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
20395  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
20396  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
20397  {
20398  internalRepresentations.resize( internalRepresentationCount );
20399  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
20400  m_device,
20401  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
20402  &internalRepresentationCount,
20403  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
20404  }
20405  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
20406  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
20407  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
20408  if ( internalRepresentationCount < internalRepresentations.size() )
20409  {
20410  internalRepresentations.resize( internalRepresentationCount );
20411  }
20412  return createResultValueType( result, internalRepresentations );
20413  }
20414 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20415 
20416  //=== VK_EXT_host_image_copy ===
20417 
20418  template <typename Dispatch>
20419  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
20420  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20421  {
20422  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20423  return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
20424  }
20425 
20426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20427  template <typename Dispatch>
20429  Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const
20430  {
20431  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20432 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20433  VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy>" );
20434 # endif
20435 
20437  d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) ) );
20438  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
20439 
20440  return createResultValueType( result );
20441  }
20442 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20443 
20444  template <typename Dispatch>
20445  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,
20446  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20447  {
20448  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20449  return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
20450  }
20451 
20452 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20453  template <typename Dispatch>
20455  Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const
20456  {
20457  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20458 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20459  VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy>" );
20460 # endif
20461 
20463  d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) ) );
20464  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
20465 
20466  return createResultValueType( result );
20467  }
20468 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20469 
20470  template <typename Dispatch>
20471  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,
20472  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20473  {
20474  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20475  return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
20476  }
20477 
20478 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20479  template <typename Dispatch>
20481  Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const
20482  {
20483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20484 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20485  VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy>" );
20486 # endif
20487 
20489  d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) ) );
20490  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
20491 
20492  return createResultValueType( result );
20493  }
20494 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20495 
20496  template <typename Dispatch>
20497  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount,
20498  const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,
20499  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20500  {
20501  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20502  return static_cast<Result>(
20503  d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
20504  }
20505 
20506 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20507  template <typename Dispatch>
20509  Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,
20510  Dispatch const & d ) const
20511  {
20512  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20513 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20514  VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy>" );
20515 # endif
20516 
20518  d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) ) );
20519  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
20520 
20521  return createResultValueType( result );
20522  }
20523 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20524 
20525  template <typename Dispatch>
20526  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
20527  const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
20528  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
20529  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20530  {
20531  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20532  d.vkGetImageSubresourceLayout2EXT( m_device,
20533  static_cast<VkImage>( image ),
20534  reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
20535  reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
20536  }
20537 
20538 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20539  template <typename Dispatch>
20540  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT(
20541  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20542  {
20543  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20544 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20546  d.vkGetImageSubresourceLayout2EXT &&
20547  "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
20548 # endif
20549 
20550  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
20551  d.vkGetImageSubresourceLayout2EXT( m_device,
20552  static_cast<VkImage>( image ),
20553  reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
20554  reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
20555 
20556  return layout;
20557  }
20558 
20559  template <typename X, typename Y, typename... Z, typename Dispatch>
20560  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
20561  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20562  {
20563  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20564 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20566  d.vkGetImageSubresourceLayout2EXT &&
20567  "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
20568 # endif
20569 
20570  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
20571  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
20572  d.vkGetImageSubresourceLayout2EXT( m_device,
20573  static_cast<VkImage>( image ),
20574  reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
20575  reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
20576 
20577  return structureChain;
20578  }
20579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20580 
20581  //=== VK_KHR_map_memory2 ===
20582 
20583  template <typename Dispatch>
20584  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,
20585  void ** ppData,
20586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20587  {
20588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20589  return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) );
20590  }
20591 
20592 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20593  template <typename Dispatch>
20595  Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const
20596  {
20597  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20599  VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2>" );
20600 # endif
20601 
20602  void * pData;
20604  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData ) );
20605  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );
20606 
20607  return createResultValueType( result, pData );
20608  }
20609 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20610 
20611  template <typename Dispatch>
20612  VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
20613  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20614  {
20615  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20616  return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
20617  }
20618 
20619 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20620  template <typename Dispatch>
20621  VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
20622  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20623  {
20624  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20625 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20626  VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" );
20627 # endif
20628 
20629  d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
20630  }
20631 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20632 
20633  //=== VK_EXT_swapchain_maintenance1 ===
20634 
20635  template <typename Dispatch>
20636  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,
20637  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20638  {
20639  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20640  return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
20641  }
20642 
20643 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20644  template <typename Dispatch>
20646  Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const
20647  {
20648  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20649 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20650  VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" );
20651 # endif
20652 
20654  d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) );
20655  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
20656 
20657  return createResultValueType( result );
20658  }
20659 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20660 
20661  //=== VK_NV_device_generated_commands ===
20662 
20663  template <typename Dispatch>
20664  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
20665  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
20666  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20667  {
20668  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20669  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
20670  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
20671  reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
20672  }
20673 
20674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20675  template <typename Dispatch>
20676  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
20677  Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
20678  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20679  {
20680  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20681 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20682  VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV &&
20683  "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
20684 # endif
20685 
20686  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
20687  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
20688  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
20689  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20690 
20691  return memoryRequirements;
20692  }
20693 
20694  template <typename X, typename Y, typename... Z, typename Dispatch>
20695  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
20696  Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
20697  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20698  {
20699  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20700 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20701  VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV &&
20702  "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
20703 # endif
20704 
20705  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
20706  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
20707  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
20708  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
20709  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20710 
20711  return structureChain;
20712  }
20713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20714 
20715  template <typename Dispatch>
20716  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
20717  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20718  {
20719  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20720  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
20721  }
20722 
20723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20724  template <typename Dispatch>
20725  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
20726  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20727  {
20728  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20729 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20730  VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
20731 # endif
20732 
20733  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
20734  }
20735 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20736 
20737  template <typename Dispatch>
20738  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
20739  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
20740  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20741  {
20742  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20743  d.vkCmdExecuteGeneratedCommandsNV(
20744  m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
20745  }
20746 
20747 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20748  template <typename Dispatch>
20749  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
20750  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
20751  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20752  {
20753  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20754 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20755  VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
20756 # endif
20757 
20758  d.vkCmdExecuteGeneratedCommandsNV(
20759  m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
20760  }
20761 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20762 
20763  template <typename Dispatch>
20764  VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
20765  VULKAN_HPP_NAMESPACE::Pipeline pipeline,
20766  uint32_t groupIndex,
20767  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20768  {
20769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20770  d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
20771  }
20772 
20773  template <typename Dispatch>
20775  Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
20776  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20777  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
20778  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20779  {
20780  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20781  return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
20782  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
20783  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
20784  reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
20785  }
20786 
20787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20788  template <typename Dispatch>
20790  Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
20792  Dispatch const & d ) const
20793  {
20794  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20795 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20796  VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" );
20797 # endif
20798 
20799  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
20800  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV(
20801  m_device,
20802  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
20803  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20804  reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
20805  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
20806 
20807  return createResultValueType( result, indirectCommandsLayout );
20808  }
20809 
20810 # ifndef VULKAN_HPP_NO_SMART_HANDLE
20811  template <typename Dispatch>
20813  Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
20815  Dispatch const & d ) const
20816  {
20817  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20818 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20819  VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" );
20820 # endif
20821 
20822  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
20823  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV(
20824  m_device,
20825  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
20826  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20827  reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
20828  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
20829 
20830  return createResultValueType( result,
20832  indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
20833  }
20834 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
20835 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20836 
20837  template <typename Dispatch>
20838  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
20839  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20840  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20841  {
20842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20843  d.vkDestroyIndirectCommandsLayoutNV(
20844  m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20845  }
20846 
20847 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20848  template <typename Dispatch>
20849  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
20851  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20852  {
20853  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20854 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20855  VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" );
20856 # endif
20857 
20858  d.vkDestroyIndirectCommandsLayoutNV(
20859  m_device,
20860  static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
20861  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20862  }
20863 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20864 
20865  template <typename Dispatch>
20866  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
20867  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20868  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20869  {
20870  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20871  d.vkDestroyIndirectCommandsLayoutNV(
20872  m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20873  }
20874 
20875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20876  template <typename Dispatch>
20877  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
20879  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20880  {
20881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20882 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20883  VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" );
20884 # endif
20885 
20886  d.vkDestroyIndirectCommandsLayoutNV(
20887  m_device,
20888  static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
20889  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20890  }
20891 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20892 
20893  //=== VK_EXT_depth_bias_control ===
20894 
20895  template <typename Dispatch>
20896  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,
20897  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20898  {
20899  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20900  d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) );
20901  }
20902 
20903 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20904  template <typename Dispatch>
20905  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,
20906  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20907  {
20908  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20909 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20910  VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" );
20911 # endif
20912 
20913  d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) );
20914  }
20915 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20916 
20917  //=== VK_EXT_acquire_drm_display ===
20918 
20919 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20920  template <typename Dispatch>
20921  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd,
20922  VULKAN_HPP_NAMESPACE::DisplayKHR display,
20923  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20924  {
20925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20926  return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
20927  }
20928 #else
20929  template <typename Dispatch>
20930  VULKAN_HPP_INLINE typename ResultValueType<void>::type
20931  PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
20932  {
20933  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20934 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20935  VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" );
20936 # endif
20937 
20939  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
20940  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
20941 
20942  return createResultValueType( result );
20943  }
20944 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20945 
20946  template <typename Dispatch>
20947  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd,
20948  uint32_t connectorId,
20949  VULKAN_HPP_NAMESPACE::DisplayKHR * display,
20950  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20951  {
20952  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20953  return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
20954  }
20955 
20956 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20957  template <typename Dispatch>
20959  PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
20960  {
20961  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20962 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20963  VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" );
20964 # endif
20965 
20966  VULKAN_HPP_NAMESPACE::DisplayKHR display;
20968  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
20969  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
20970 
20971  return createResultValueType( result, display );
20972  }
20973 
20974 # ifndef VULKAN_HPP_NO_SMART_HANDLE
20975  template <typename Dispatch>
20977  PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
20978  {
20979  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20980 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
20981  VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" );
20982 # endif
20983 
20984  VULKAN_HPP_NAMESPACE::DisplayKHR display;
20986  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
20987  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
20988 
20989  return createResultValueType( result,
20991  }
20992 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
20993 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20994 
20995  //=== VK_EXT_private_data ===
20996 
20997  template <typename Dispatch>
20998  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
20999  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21000  VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
21001  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21002  {
21003  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21004  return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
21005  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
21006  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
21007  reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
21008  }
21009 
21010 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21011  template <typename Dispatch>
21013  Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
21015  Dispatch const & d ) const
21016  {
21017  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21018 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21019  VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
21020 # endif
21021 
21022  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
21023  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT(
21024  m_device,
21025  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
21026  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21027  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
21028  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
21029 
21030  return createResultValueType( result, privateDataSlot );
21031  }
21032 
21033 # ifndef VULKAN_HPP_NO_SMART_HANDLE
21034  template <typename Dispatch>
21036  Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
21038  Dispatch const & d ) const
21039  {
21040  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21041 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21042  VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
21043 # endif
21044 
21045  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
21046  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT(
21047  m_device,
21048  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
21049  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21050  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
21051  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
21052 
21053  return createResultValueType(
21054  result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
21055  }
21056 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
21057 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21058 
21059  template <typename Dispatch>
21060  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21061  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21062  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21063  {
21064  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21065  d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21066  }
21067 
21068 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21069  template <typename Dispatch>
21070  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21072  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21073  {
21074  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21075 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21076  VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
21077 # endif
21078 
21079  d.vkDestroyPrivateDataSlotEXT(
21080  m_device,
21081  static_cast<VkPrivateDataSlot>( privateDataSlot ),
21082  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21083  }
21084 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21085 
21086 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
21087  template <typename Dispatch>
21088  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
21089  uint64_t objectHandle,
21090  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21091  uint64_t data,
21092  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21093  {
21094  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21095  return static_cast<Result>(
21096  d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
21097  }
21098 #else
21099  template <typename Dispatch>
21100  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
21101  uint64_t objectHandle,
21102  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21103  uint64_t data,
21104  Dispatch const & d ) const
21105  {
21106  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21107 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21108  VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
21109 # endif
21110 
21112  d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
21113  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
21114 
21115  return createResultValueType( result );
21116  }
21117 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21118 
21119  template <typename Dispatch>
21120  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
21121  uint64_t objectHandle,
21122  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21123  uint64_t * pData,
21124  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21125  {
21126  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21127  d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
21128  }
21129 
21130 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21131  template <typename Dispatch>
21132  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
21133  uint64_t objectHandle,
21134  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
21135  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21136  {
21137  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21138 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21139  VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
21140 # endif
21141 
21142  uint64_t data;
21143  d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
21144 
21145  return data;
21146  }
21147 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21148 
21149  //=== VK_KHR_video_encode_queue ===
21150 
21151  template <typename Dispatch>
21153  PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,
21154  VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,
21155  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21156  {
21157  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21158  return static_cast<Result>(
21159  d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
21160  reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ),
21161  reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) );
21162  }
21163 
21164 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21165  template <typename Dispatch>
21167  PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
21168  Dispatch const & d ) const
21169  {
21170  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21171 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21172  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
21173  "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
21174 # endif
21175 
21176  VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
21178  d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
21179  reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
21180  reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
21181  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
21182 
21183  return createResultValueType( result, qualityLevelProperties );
21184  }
21185 
21186  template <typename X, typename Y, typename... Z, typename Dispatch>
21187  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
21188  PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
21189  Dispatch const & d ) const
21190  {
21191  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21192 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21193  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
21194  "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
21195 # endif
21196 
21197  StructureChain<X, Y, Z...> structureChain;
21198  VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties =
21199  structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>();
21201  d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
21202  reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
21203  reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
21204  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
21205 
21206  return createResultValueType( result, structureChain );
21207  }
21208 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21209 
21210  template <typename Dispatch>
21212  Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,
21213  VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,
21214  size_t * pDataSize,
21215  void * pData,
21216  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21217  {
21218  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21219  return static_cast<Result>(
21220  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21221  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ),
21222  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ),
21223  pDataSize,
21224  pData ) );
21225  }
21226 
21227 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21228  template <typename Uint8_tAllocator, typename Dispatch>
21231  Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
21232  Dispatch const & d ) const
21233  {
21234  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21235 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21236  VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
21237 # endif
21238 
21239  std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_;
21240  VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
21241  std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
21242  size_t dataSize;
21244  do
21245  {
21246  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21247  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21248  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21249  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21250  &dataSize,
21251  nullptr ) );
21252  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
21253  {
21254  data.resize( dataSize );
21255  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21256  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21257  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21258  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21259  &dataSize,
21260  reinterpret_cast<void *>( data.data() ) ) );
21261  }
21262  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21263  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
21264 
21265  return createResultValueType( result, data_ );
21266  }
21267 
21268  template <typename Uint8_tAllocator,
21269  typename Dispatch,
21270  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
21273  Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
21274  Uint8_tAllocator & uint8_tAllocator,
21275  Dispatch const & d ) const
21276  {
21277  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21278 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21279  VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
21280 # endif
21281 
21282  std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_(
21283  std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
21284  VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
21285  std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
21286  size_t dataSize;
21288  do
21289  {
21290  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21291  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21292  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21293  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21294  &dataSize,
21295  nullptr ) );
21296  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
21297  {
21298  data.resize( dataSize );
21299  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21300  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21301  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21302  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21303  &dataSize,
21304  reinterpret_cast<void *>( data.data() ) ) );
21305  }
21306  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21307  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
21308 
21309  return createResultValueType( result, data_ );
21310  }
21311 
21312  template <typename X, typename Y, typename... Z, typename Uint8_tAllocator, typename Dispatch>
21314  typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
21315  Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
21316  Dispatch const & d ) const
21317  {
21318  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21320  VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
21321 # endif
21322 
21323  std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_;
21324  VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
21325  data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
21326  std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
21327  size_t dataSize;
21329  do
21330  {
21331  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21332  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21333  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21334  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21335  &dataSize,
21336  nullptr ) );
21337  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
21338  {
21339  data.resize( dataSize );
21340  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21341  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21342  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21343  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21344  &dataSize,
21345  reinterpret_cast<void *>( data.data() ) ) );
21346  }
21347  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21348  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
21349 
21350  return createResultValueType( result, data_ );
21351  }
21352 
21353  template <typename X,
21354  typename Y,
21355  typename... Z,
21356  typename Uint8_tAllocator,
21357  typename Dispatch,
21358  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
21360  typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
21361  Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
21362  Uint8_tAllocator & uint8_tAllocator,
21363  Dispatch const & d ) const
21364  {
21365  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21366 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21367  VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
21368 # endif
21369 
21370  std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_(
21371  std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
21372  VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
21373  data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
21374  std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
21375  size_t dataSize;
21377  do
21378  {
21379  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21380  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21381  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21382  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21383  &dataSize,
21384  nullptr ) );
21385  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
21386  {
21387  data.resize( dataSize );
21388  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21389  d.vkGetEncodedVideoSessionParametersKHR( m_device,
21390  reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
21391  reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
21392  &dataSize,
21393  reinterpret_cast<void *>( data.data() ) ) );
21394  }
21395  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21396  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
21397 
21398  return createResultValueType( result, data_ );
21399  }
21400 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21401 
21402  template <typename Dispatch>
21403  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
21404  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21405  {
21406  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21407  d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
21408  }
21409 
21410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21411  template <typename Dispatch>
21412  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
21413  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21414  {
21415  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21416 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21417  VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires <VK_KHR_video_encode_queue>" );
21418 # endif
21419 
21420  d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
21421  }
21422 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21423 
21424 #if defined( VK_ENABLE_BETA_EXTENSIONS )
21425  //=== VK_NV_cuda_kernel_launch ===
21426 
21427  template <typename Dispatch>
21428  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
21429  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21430  VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,
21431  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21432  {
21433  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21434  return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
21435  reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
21436  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
21437  reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
21438  }
21439 
21440 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21441  template <typename Dispatch>
21443  Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
21445  Dispatch const & d ) const
21446  {
21447  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21448 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21449  VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
21450 # endif
21451 
21452  VULKAN_HPP_NAMESPACE::CudaModuleNV module;
21454  d.vkCreateCudaModuleNV( m_device,
21455  reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
21456  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21457  reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
21458  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" );
21459 
21460  return createResultValueType( result, module );
21461  }
21462 
21463 # ifndef VULKAN_HPP_NO_SMART_HANDLE
21464  template <typename Dispatch>
21466  Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
21468  Dispatch const & d ) const
21469  {
21470  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21471 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21472  VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
21473 # endif
21474 
21475  VULKAN_HPP_NAMESPACE::CudaModuleNV module;
21477  d.vkCreateCudaModuleNV( m_device,
21478  reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
21479  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21480  reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
21481  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" );
21482 
21483  return createResultValueType(
21485  }
21486 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
21487 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21488 
21489  template <typename Dispatch>
21490  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
21491  size_t * pCacheSize,
21492  void * pCacheData,
21493  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21494  {
21495  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21496  return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
21497  }
21498 
21499 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21500  template <typename Uint8_tAllocator, typename Dispatch>
21502  Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const
21503  {
21504  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21505 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21506  VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
21507 # endif
21508 
21509  std::vector<uint8_t, Uint8_tAllocator> cacheData;
21510  size_t cacheSize;
21512  do
21513  {
21514  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
21515  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
21516  {
21517  cacheData.resize( cacheSize );
21518  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21519  d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
21520  }
21521  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21522  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
21523  VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
21524  if ( cacheSize < cacheData.size() )
21525  {
21526  cacheData.resize( cacheSize );
21527  }
21528  return createResultValueType( result, cacheData );
21529  }
21530 
21531  template <typename Uint8_tAllocator,
21532  typename Dispatch,
21533  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
21535  Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
21536  {
21537  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21538 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21539  VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
21540 # endif
21541 
21542  std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator );
21543  size_t cacheSize;
21545  do
21546  {
21547  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
21548  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
21549  {
21550  cacheData.resize( cacheSize );
21551  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
21552  d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
21553  }
21554  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
21555  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
21556  VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
21557  if ( cacheSize < cacheData.size() )
21558  {
21559  cacheData.resize( cacheSize );
21560  }
21561  return createResultValueType( result, cacheData );
21562  }
21563 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21564 
21565  template <typename Dispatch>
21566  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
21567  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21568  VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,
21569  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21570  {
21571  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21572  return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
21573  reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
21574  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
21575  reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
21576  }
21577 
21578 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21579  template <typename Dispatch>
21581  Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
21583  Dispatch const & d ) const
21584  {
21585  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21586 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21587  VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
21588 # endif
21589 
21590  VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
21592  d.vkCreateCudaFunctionNV( m_device,
21593  reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
21594  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21595  reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
21596  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" );
21597 
21598  return createResultValueType( result, function );
21599  }
21600 
21601 # ifndef VULKAN_HPP_NO_SMART_HANDLE
21602  template <typename Dispatch>
21604  Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
21606  Dispatch const & d ) const
21607  {
21608  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21609 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21610  VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
21611 # endif
21612 
21613  VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
21615  d.vkCreateCudaFunctionNV( m_device,
21616  reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
21617  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21618  reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
21619  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" );
21620 
21621  return createResultValueType(
21623  }
21624 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
21625 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21626 
21627  template <typename Dispatch>
21628  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
21629  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21630  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21631  {
21632  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21633  d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21634  }
21635 
21636 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21637  template <typename Dispatch>
21638  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
21640  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21641  {
21642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21643 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21644  VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
21645 # endif
21646 
21647  d.vkDestroyCudaModuleNV( m_device,
21648  static_cast<VkCudaModuleNV>( module ),
21649  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21650  }
21651 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21652 
21653  template <typename Dispatch>
21654  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
21655  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21656  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21657  {
21658  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21659  d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21660  }
21661 
21662 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21663  template <typename Dispatch>
21664  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
21666  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21667  {
21668  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21669 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21670  VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" );
21671 # endif
21672 
21673  d.vkDestroyCudaModuleNV( m_device,
21674  static_cast<VkCudaModuleNV>( module ),
21675  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21676  }
21677 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21678 
21679  template <typename Dispatch>
21680  VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
21681  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21682  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21683  {
21684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21685  d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21686  }
21687 
21688 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21689  template <typename Dispatch>
21690  VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
21692  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21693  {
21694  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21695 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21696  VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
21697 # endif
21698 
21699  d.vkDestroyCudaFunctionNV( m_device,
21700  static_cast<VkCudaFunctionNV>( function ),
21701  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21702  }
21703 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21704 
21705  template <typename Dispatch>
21706  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
21707  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21708  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21709  {
21710  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21711  d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21712  }
21713 
21714 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21715  template <typename Dispatch>
21716  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
21718  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21719  {
21720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21721 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21722  VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" );
21723 # endif
21724 
21725  d.vkDestroyCudaFunctionNV( m_device,
21726  static_cast<VkCudaFunctionNV>( function ),
21727  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21728  }
21729 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21730 
21731  template <typename Dispatch>
21732  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
21733  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21734  {
21735  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21736  d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
21737  }
21738 
21739 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21740  template <typename Dispatch>
21741  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
21742  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21743  {
21744  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21745 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21746  VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" );
21747 # endif
21748 
21749  d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
21750  }
21751 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21752 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
21753 
21754 #if defined( VK_USE_PLATFORM_METAL_EXT )
21755  //=== VK_EXT_metal_objects ===
21756 
21757  template <typename Dispatch>
21758  VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
21759  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21760  {
21761  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21762  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
21763  }
21764 
21765 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21766  template <typename Dispatch>
21767  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
21768  Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21769  {
21770  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21771 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21772  VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
21773 # endif
21774 
21775  VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
21776  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
21777 
21778  return metalObjectsInfo;
21779  }
21780 
21781  template <typename X, typename Y, typename... Z, typename Dispatch>
21782  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
21783  Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21784  {
21785  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21786 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21787  VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
21788 # endif
21789 
21790  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
21791  VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
21792  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
21793 
21794  return structureChain;
21795  }
21796 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21797 #endif /*VK_USE_PLATFORM_METAL_EXT*/
21798 
21799  //=== VK_KHR_synchronization2 ===
21800 
21801  template <typename Dispatch>
21802  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
21803  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
21804  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21805  {
21806  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21807  d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
21808  }
21809 
21810 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21811  template <typename Dispatch>
21812  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
21813  const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
21814  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21815  {
21816  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21818  VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
21819 # endif
21820 
21821  d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
21822  }
21823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21824 
21825  template <typename Dispatch>
21826  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
21828  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21829  {
21830  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21831  d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
21832  }
21833 
21834  template <typename Dispatch>
21835  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount,
21836  const VULKAN_HPP_NAMESPACE::Event * pEvents,
21837  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
21838  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21839  {
21840  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21841  d.vkCmdWaitEvents2KHR(
21842  m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
21843  }
21844 
21845 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21846  template <typename Dispatch>
21847  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
21848  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
21849  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
21850  {
21851  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21852 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21853  VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
21854 # endif
21855 # ifdef VULKAN_HPP_NO_EXCEPTIONS
21856  VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
21857 # else
21858  if ( events.size() != dependencyInfos.size() )
21859  {
21860  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
21861  }
21862 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
21863 
21864  d.vkCmdWaitEvents2KHR( m_commandBuffer,
21865  events.size(),
21866  reinterpret_cast<const VkEvent *>( events.data() ),
21867  reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
21868  }
21869 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21870 
21871  template <typename Dispatch>
21872  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
21873  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21874  {
21875  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21876  d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
21877  }
21878 
21879 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21880  template <typename Dispatch>
21881  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
21882  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21883  {
21884  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21885 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21886  VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
21887 # endif
21888 
21889  d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
21890  }
21891 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21892 
21893  template <typename Dispatch>
21894  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
21895  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
21896  uint32_t query,
21897  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21898  {
21899  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21900  d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
21901  }
21902 
21903  template <typename Dispatch>
21904  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount,
21905  const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
21906  VULKAN_HPP_NAMESPACE::Fence fence,
21907  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21908  {
21909  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21910  return static_cast<Result>(
21911  d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
21912  }
21913 
21914 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21915  template <typename Dispatch>
21917  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
21918  {
21919  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21920 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21921  VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
21922 # endif
21923 
21925  d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
21926  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
21927 
21928  return createResultValueType( result );
21929  }
21930 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21931 
21932  template <typename Dispatch>
21933  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
21934  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
21936  uint32_t marker,
21937  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21938  {
21939  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21940  d.vkCmdWriteBufferMarker2AMD(
21941  m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
21942  }
21943 
21944  template <typename Dispatch>
21945  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
21946  VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
21947  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21948  {
21949  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21950  d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
21951  }
21952 
21953 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21954  template <typename CheckpointData2NVAllocator, typename Dispatch>
21955  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
21956  Queue::getCheckpointData2NV( Dispatch const & d ) const
21957  {
21958  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21959 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21960  VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" );
21961 # endif
21962 
21963  std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
21964  uint32_t checkpointDataCount;
21965  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
21966  checkpointData.resize( checkpointDataCount );
21967  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
21968 
21969  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
21970  if ( checkpointDataCount < checkpointData.size() )
21971  {
21972  checkpointData.resize( checkpointDataCount );
21973  }
21974  return checkpointData;
21975  }
21976 
21977  template <typename CheckpointData2NVAllocator,
21978  typename Dispatch,
21979  typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type>
21980  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
21981  Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
21982  {
21983  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21984 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
21985  VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" );
21986 # endif
21987 
21988  std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
21989  uint32_t checkpointDataCount;
21990  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
21991  checkpointData.resize( checkpointDataCount );
21992  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
21993 
21994  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
21995  if ( checkpointDataCount < checkpointData.size() )
21996  {
21997  checkpointData.resize( checkpointDataCount );
21998  }
21999  return checkpointData;
22000  }
22001 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22002 
22003  //=== VK_EXT_descriptor_buffer ===
22004 
22005  template <typename Dispatch>
22006  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
22007  VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,
22008  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22009  {
22010  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22011  d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
22012  }
22013 
22014 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22015  template <typename Dispatch>
22017  Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22018  {
22019  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22020 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22021  VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" );
22022 # endif
22023 
22024  VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
22025  d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
22026 
22027  return layoutSizeInBytes;
22028  }
22029 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22030 
22031  template <typename Dispatch>
22032  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
22033  uint32_t binding,
22035  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22036  {
22037  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22038  d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
22039  }
22040 
22041 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22042  template <typename Dispatch>
22043  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT(
22044  VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22045  {
22046  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22047 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22048  VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT &&
22049  "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" );
22050 # endif
22051 
22053  d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
22054 
22055  return offset;
22056  }
22057 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22058 
22059  template <typename Dispatch>
22060  VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,
22061  size_t dataSize,
22062  void * pDescriptor,
22063  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22064  {
22065  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22066  d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
22067  }
22068 
22069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22070  template <typename Dispatch>
22071  VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
22072  size_t dataSize,
22073  void * pDescriptor,
22074  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22075  {
22076  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22077 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22078  VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
22079 # endif
22080 
22081  d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor );
22082  }
22083 
22084  template <typename DescriptorType, typename Dispatch>
22085  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
22086  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22087  {
22088  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22089 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22090  VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
22091 # endif
22092 
22093  DescriptorType descriptor;
22094  d.vkGetDescriptorEXT(
22095  m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
22096 
22097  return descriptor;
22098  }
22099 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22100 
22101  template <typename Dispatch>
22102  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount,
22103  const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos,
22104  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22105  {
22106  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22107  d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
22108  }
22109 
22110 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22111  template <typename Dispatch>
22112  VULKAN_HPP_INLINE void
22113  CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,
22114  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22115  {
22116  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22117 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22118  VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" );
22119 # endif
22120 
22121  d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
22122  }
22123 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22124 
22125  template <typename Dispatch>
22126  VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
22127  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
22128  uint32_t firstSet,
22129  uint32_t setCount,
22130  const uint32_t * pBufferIndices,
22131  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
22132  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22133  {
22134  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22135  d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
22136  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
22137  static_cast<VkPipelineLayout>( layout ),
22138  firstSet,
22139  setCount,
22140  pBufferIndices,
22141  reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
22142  }
22143 
22144 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22145  template <typename Dispatch>
22146  VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
22147  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
22148  uint32_t firstSet,
22149  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
22150  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
22151  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
22152  {
22153  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22154 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22155  VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" );
22156 # endif
22157 # ifdef VULKAN_HPP_NO_EXCEPTIONS
22158  VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() );
22159 # else
22160  if ( bufferIndices.size() != offsets.size() )
22161  {
22162  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
22163  }
22164 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
22165 
22166  d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
22167  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
22168  static_cast<VkPipelineLayout>( layout ),
22169  firstSet,
22170  bufferIndices.size(),
22171  bufferIndices.data(),
22172  reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
22173  }
22174 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22175 
22176  template <typename Dispatch>
22177  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
22178  VULKAN_HPP_NAMESPACE::PipelineLayout layout,
22179  uint32_t set,
22180  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22181  {
22182  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22183  d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
22184  m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
22185  }
22186 
22187  template <typename Dispatch>
22188  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT(
22189  const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22190  {
22191  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22192  return static_cast<Result>(
22193  d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
22194  }
22195 
22196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22197  template <typename DataType, typename Dispatch>
22199  Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
22200  {
22201  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22202 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22203  VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT &&
22204  "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
22205 # endif
22206 
22207  DataType data;
22209  d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
22210  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
22211 
22212  return createResultValueType( result, data );
22213  }
22214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22215 
22216  template <typename Dispatch>
22217  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT(
22218  const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22219  {
22220  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22221  return static_cast<Result>(
22222  d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
22223  }
22224 
22225 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22226  template <typename DataType, typename Dispatch>
22228  Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
22229  {
22230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22231 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22232  VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT &&
22233  "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
22234 # endif
22235 
22236  DataType data;
22238  d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
22239  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
22240 
22241  return createResultValueType( result, data );
22242  }
22243 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22244 
22245  template <typename Dispatch>
22246  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT(
22247  const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22248  {
22249  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22250  return static_cast<Result>(
22251  d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
22252  }
22253 
22254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22255  template <typename DataType, typename Dispatch>
22257  Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
22258  {
22259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22260 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22261  VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT &&
22262  "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
22263 # endif
22264 
22265  DataType data;
22267  d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
22268  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
22269 
22270  return createResultValueType( result, data );
22271  }
22272 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22273 
22274  template <typename Dispatch>
22275  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT(
22276  const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22277  {
22278  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22279  return static_cast<Result>(
22280  d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
22281  }
22282 
22283 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22284  template <typename DataType, typename Dispatch>
22286  Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
22287  {
22288  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22289 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22290  VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT &&
22291  "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
22292 # endif
22293 
22294  DataType data;
22296  d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
22297  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
22298 
22299  return createResultValueType( result, data );
22300  }
22301 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22302 
22303  template <typename Dispatch>
22304  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT(
22305  const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22306  {
22307  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22308  return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
22309  m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
22310  }
22311 
22312 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22313  template <typename DataType, typename Dispatch>
22315  Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,
22316  Dispatch const & d ) const
22317  {
22318  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22319 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22320  VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT &&
22321  "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
22322 # endif
22323 
22324  DataType data;
22325  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
22326  m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
22327  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
22328 
22329  return createResultValueType( result, data );
22330  }
22331 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22332 
22333  //=== VK_NV_fragment_shading_rate_enums ===
22334 
22335  template <typename Dispatch>
22336  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
22338  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22339  {
22340  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22341  d.vkCmdSetFragmentShadingRateEnumNV(
22342  m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
22343  }
22344 
22345  //=== VK_EXT_mesh_shader ===
22346 
22347  template <typename Dispatch>
22348  VULKAN_HPP_INLINE void
22349  CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22350  {
22351  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22352  d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
22353  }
22354 
22355  template <typename Dispatch>
22356  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
22358  uint32_t drawCount,
22359  uint32_t stride,
22360  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22361  {
22362  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22363  d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
22364  }
22365 
22366  template <typename Dispatch>
22367  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
22369  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
22370  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
22371  uint32_t maxDrawCount,
22372  uint32_t stride,
22373  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22374  {
22375  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22376  d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
22377  static_cast<VkBuffer>( buffer ),
22378  static_cast<VkDeviceSize>( offset ),
22379  static_cast<VkBuffer>( countBuffer ),
22380  static_cast<VkDeviceSize>( countBufferOffset ),
22381  maxDrawCount,
22382  stride );
22383  }
22384 
22385  //=== VK_KHR_copy_commands2 ===
22386 
22387  template <typename Dispatch>
22388  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
22389  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22390  {
22391  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22392  d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
22393  }
22394 
22395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22396  template <typename Dispatch>
22397  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
22398  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22399  {
22400  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22401 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22402  VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22403 # endif
22404 
22405  d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
22406  }
22407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22408 
22409  template <typename Dispatch>
22410  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
22411  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22412  {
22413  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22414  d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
22415  }
22416 
22417 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22418  template <typename Dispatch>
22419  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
22420  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22421  {
22422  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22423 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22424  VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22425 # endif
22426 
22427  d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
22428  }
22429 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22430 
22431  template <typename Dispatch>
22432  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
22433  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22434  {
22435  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22436  d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
22437  }
22438 
22439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22440  template <typename Dispatch>
22441  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
22442  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22443  {
22444  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22445 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22446  VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22447 # endif
22448 
22449  d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
22450  }
22451 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22452 
22453  template <typename Dispatch>
22454  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
22455  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22456  {
22457  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22458  d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
22459  }
22460 
22461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22462  template <typename Dispatch>
22463  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
22464  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22465  {
22466  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22467 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22468  VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22469 # endif
22470 
22471  d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
22472  }
22473 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22474 
22475  template <typename Dispatch>
22476  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
22477  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22478  {
22479  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22480  d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
22481  }
22482 
22483 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22484  template <typename Dispatch>
22485  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
22486  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22487  {
22488  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22489 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22490  VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22491 # endif
22492 
22493  d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
22494  }
22495 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22496 
22497  template <typename Dispatch>
22498  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
22499  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22500  {
22501  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22502  d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
22503  }
22504 
22505 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22506  template <typename Dispatch>
22507  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
22508  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22509  {
22510  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22511 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22512  VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
22513 # endif
22514 
22515  d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
22516  }
22517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22518 
22519  //=== VK_EXT_device_fault ===
22520 
22521  template <typename Dispatch>
22522  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
22523  VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
22524  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22525  {
22526  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22527  return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
22528  m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
22529  }
22530 
22531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22532  template <typename Dispatch>
22534  Device::getFaultInfoEXT( Dispatch const & d ) const
22535  {
22536  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22537 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22538  VULKAN_HPP_ASSERT( d.vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> requires <VK_EXT_device_fault>" );
22539 # endif
22540 
22541  std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data_;
22542  VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first;
22543  VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second;
22544  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceFaultInfoEXT(
22545  m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) ) );
22546  resultCheck(
22547  result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
22548 
22550  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
22551  }
22552 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22553 
22554 #if defined( VK_USE_PLATFORM_WIN32_KHR )
22555  //=== VK_NV_acquire_winrt_display ===
22556 
22557 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
22558  template <typename Dispatch>
22559  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
22560  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22561  {
22562  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22563  return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
22564  }
22565 # else
22566  template <typename Dispatch>
22567  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
22568  PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
22569  {
22570  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22571 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22572  VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" );
22573 # endif
22574 
22576  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
22577  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
22578 
22579  return createResultValueType( result );
22580  }
22581 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22582 
22583  template <typename Dispatch>
22584  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
22585  VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
22586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22587  {
22588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22589  return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
22590  }
22591 
22592 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22593  template <typename Dispatch>
22594  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
22595  PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
22596  {
22597  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22598 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22599  VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" );
22600 # endif
22601 
22602  VULKAN_HPP_NAMESPACE::DisplayKHR display;
22604  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
22605  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
22606 
22607  return createResultValueType( result, display );
22608  }
22609 
22610 # ifndef VULKAN_HPP_NO_SMART_HANDLE
22611  template <typename Dispatch>
22612  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
22613  PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
22614  {
22615  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22616 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22617  VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" );
22618 # endif
22619 
22620  VULKAN_HPP_NAMESPACE::DisplayKHR display;
22622  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
22623  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
22624 
22625  return createResultValueType( result,
22626  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
22627  }
22628 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
22629 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22630 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
22631 
22632 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
22633  //=== VK_EXT_directfb_surface ===
22634 
22635  template <typename Dispatch>
22636  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
22637  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
22638  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
22639  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22640  {
22641  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22642  return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
22643  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
22644  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
22645  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
22646  }
22647 
22648 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22649  template <typename Dispatch>
22650  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
22651  Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
22652  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
22653  Dispatch const & d ) const
22654  {
22655  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22656 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22657  VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" );
22658 # endif
22659 
22660  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
22661  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT(
22662  m_instance,
22663  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
22664  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
22665  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
22666  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
22667 
22668  return createResultValueType( result, surface );
22669  }
22670 
22671 # ifndef VULKAN_HPP_NO_SMART_HANDLE
22672  template <typename Dispatch>
22673  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
22674  Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
22675  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
22676  Dispatch const & d ) const
22677  {
22678  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22679 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22680  VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" );
22681 # endif
22682 
22683  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
22684  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT(
22685  m_instance,
22686  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
22687  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
22688  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
22689  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
22690 
22691  return createResultValueType(
22692  result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
22693  }
22694 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
22695 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22696 
22697  template <typename Dispatch>
22698  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
22699  IDirectFB * dfb,
22700  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22701  {
22702  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22703  return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
22704  }
22705 
22706 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22707  template <typename Dispatch>
22709  PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22710  {
22711  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22712 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22713  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
22714  "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" );
22715 # endif
22716 
22717  VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
22718 
22719  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
22720  }
22721 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22722 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
22723 
22724  //=== VK_EXT_vertex_input_dynamic_state ===
22725 
22726  template <typename Dispatch>
22727  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
22728  const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
22729  uint32_t vertexAttributeDescriptionCount,
22730  const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
22731  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22732  {
22733  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22734  d.vkCmdSetVertexInputEXT( m_commandBuffer,
22735  vertexBindingDescriptionCount,
22736  reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
22737  vertexAttributeDescriptionCount,
22738  reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
22739  }
22740 
22741 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22742  template <typename Dispatch>
22743  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
22744  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
22745  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
22746  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22747  {
22748  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22749 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22750  VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
22751 # endif
22752 
22753  d.vkCmdSetVertexInputEXT( m_commandBuffer,
22754  vertexBindingDescriptions.size(),
22755  reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
22756  vertexAttributeDescriptions.size(),
22757  reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
22758  }
22759 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22760 
22761 #if defined( VK_USE_PLATFORM_FUCHSIA )
22762  //=== VK_FUCHSIA_external_memory ===
22763 
22764  template <typename Dispatch>
22766  Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
22767  zx_handle_t * pZirconHandle,
22768  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22769  {
22770  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22771  return static_cast<Result>(
22772  d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
22773  }
22774 
22775 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22776  template <typename Dispatch>
22777  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
22778  Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
22779  {
22780  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22781 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22782  VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
22783 # endif
22784 
22785  zx_handle_t zirconHandle;
22787  d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
22788  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
22789 
22790  return createResultValueType( result, zirconHandle );
22791  }
22792 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22793 
22794  template <typename Dispatch>
22796  Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
22797  zx_handle_t zirconHandle,
22798  VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
22799  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22800  {
22801  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22802  return static_cast<Result>(
22803  d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
22804  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
22805  zirconHandle,
22806  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
22807  }
22808 
22809 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22810  template <typename Dispatch>
22811  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
22812  Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
22813  zx_handle_t zirconHandle,
22814  Dispatch const & d ) const
22815  {
22816  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22818  VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA &&
22819  "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
22820 # endif
22821 
22822  VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
22824  d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
22825  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
22826  zirconHandle,
22827  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
22828  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
22829 
22830  return createResultValueType( result, memoryZirconHandleProperties );
22831  }
22832 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22833 #endif /*VK_USE_PLATFORM_FUCHSIA*/
22834 
22835 #if defined( VK_USE_PLATFORM_FUCHSIA )
22836  //=== VK_FUCHSIA_external_semaphore ===
22837 
22838  template <typename Dispatch>
22839  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
22840  const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22841  {
22842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22843  return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
22844  m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
22845  }
22846 
22847 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22848  template <typename Dispatch>
22849  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
22850  Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
22851  Dispatch const & d ) const
22852  {
22853  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22854 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22855  VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
22856 # endif
22857 
22858  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
22859  m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
22860  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
22861 
22862  return createResultValueType( result );
22863  }
22864 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22865 
22866  template <typename Dispatch>
22868  Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
22869  zx_handle_t * pZirconHandle,
22870  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22871  {
22872  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22873  return static_cast<Result>(
22874  d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
22875  }
22876 
22877 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22878  template <typename Dispatch>
22879  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
22880  Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
22881  {
22882  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22883 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22884  VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
22885 # endif
22886 
22887  zx_handle_t zirconHandle;
22889  d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
22890  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
22891 
22892  return createResultValueType( result, zirconHandle );
22893  }
22894 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22895 #endif /*VK_USE_PLATFORM_FUCHSIA*/
22896 
22897 #if defined( VK_USE_PLATFORM_FUCHSIA )
22898  //=== VK_FUCHSIA_buffer_collection ===
22899 
22900  template <typename Dispatch>
22902  Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
22903  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
22904  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
22905  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22906  {
22907  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22908  return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
22909  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
22910  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
22911  reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
22912  }
22913 
22914 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22915  template <typename Dispatch>
22916  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
22917  Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
22918  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
22919  Dispatch const & d ) const
22920  {
22921  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22922 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22923  VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
22924 # endif
22925 
22926  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
22927  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA(
22928  m_device,
22929  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
22930  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
22931  reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
22932  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
22933 
22934  return createResultValueType( result, collection );
22935  }
22936 
22937 # ifndef VULKAN_HPP_NO_SMART_HANDLE
22938  template <typename Dispatch>
22939  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
22940  Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
22941  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
22942  Dispatch const & d ) const
22943  {
22944  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22945 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
22946  VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
22947 # endif
22948 
22949  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
22950  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA(
22951  m_device,
22952  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
22953  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
22954  reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
22955  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
22956 
22957  return createResultValueType(
22958  result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
22959  }
22960 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
22961 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22962 
22963  template <typename Dispatch>
22965  Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
22966  const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
22967  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
22968  {
22969  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
22970  return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
22971  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
22972  }
22973 
22974 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the constraints by reference and routes the
  // Result through resultCheck (throws on failure when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
                                                        Dispatch const & d ) const
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic loader, the extension function pointer may be null if the
    // extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA &&
                       "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );

    return createResultValueType( result );
  }
22993 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
22994 
22995  template <typename Dispatch>
22997  Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
22998  const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
22999  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23000  {
23001  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23002  return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
23003  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
23004  }
23005 
23006 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the constraints by reference and routes the
  // Result through resultCheck (throws on failure when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
                                                         Dispatch const & d ) const
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA &&
                       "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
# endif

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );

    return createResultValueType( result );
  }
23025 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23026 
  // Destroys a Fuchsia buffer collection. C-pointer overload; pAllocator may be
  // null to use the default allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
23036 
23037 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: Optional<> allocator converts to a possibly-null
  // VkAllocationCallbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
# endif

    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
23053 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23054 
  // Generic destroy() overload for BufferCollectionFUCHSIA handles; identical to
  // destroyBufferCollectionFUCHSIA (used by UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
23064 
23065 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy() overload for BufferCollectionFUCHSIA handles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
# endif

    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
23081 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23082 
23083  template <typename Dispatch>
23085  Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
23086  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
23087  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23088  {
23089  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23090  return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
23091  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
23092  }
23093 
23094 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties by value; routes the Result
  // through resultCheck (throws on failure when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA &&
                       "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
# endif

    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );

    return createResultValueType( result, properties );
  }
23112 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23113 #endif /*VK_USE_PLATFORM_FUCHSIA*/
23114 
23115  //=== VK_HUAWEI_subpass_shading ===
23116 
  // Queries the maximum subpass-shading workgroup size for a render pass.
  // C-pointer overload: writes into *pMaxWorkgroupSize and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
                                                                                                 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
                                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
  }
23126 
23127 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23128  template <typename Dispatch>
23130  Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
23131  {
23132  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23133 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23134  VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
23135  "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
23136 # endif
23137 
23138  VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
23139  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
23140  m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
23141  resultCheck( result,
23142  VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
23143  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
23144 
23145  return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
23146  }
23147 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23148 
  // Records a subpass-shading dispatch into this command buffer
  // (thin forwarder to vkCmdSubpassShadingHUAWEI).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
  }
23155 
23156  //=== VK_HUAWEI_invocation_mask ===
23157 
23158  template <typename Dispatch>
23159  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
23161  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23162  {
23163  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23164  d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
23165  }
23166 
23167  //=== VK_NV_external_memory_rdma ===
23168 
23169  template <typename Dispatch>
23171  Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
23173  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23174  {
23175  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23176  return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
23177  m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
23178  }
23179 
23180 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23181  template <typename Dispatch>
23183  Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
23184  {
23185  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23186 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23187  VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" );
23188 # endif
23189 
23191  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryRemoteAddressNV(
23192  m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
23193  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
23194 
23195  return createResultValueType( result, address );
23196  }
23197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23198 
23199  //=== VK_EXT_pipeline_properties ===
23200 
  // Queries pipeline properties (VK_EXT_pipeline_properties). C-pointer overload:
  // the caller supplies the output structure chain via pPipelineProperties.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
                                                                                  VULKAN_HPP_NAMESPACE::BaseOutStructure *      pPipelineProperties,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
  }
23210 
23211 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23212  template <typename Dispatch>
23214  Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
23215  {
23216  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23217 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23218  VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" );
23219 # endif
23220 
23221  VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
23222  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelinePropertiesEXT(
23223  m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) );
23224  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
23225 
23226  return createResultValueType( result, pipelineProperties );
23227  }
23228 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23229 
23230  //=== VK_EXT_extended_dynamic_state2 ===
23231 
  // Dynamic state (VK_EXT_extended_dynamic_state2): sets the tessellation patch
  // control point count.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }
23238 
  // Dynamic state: enables/disables rasterizer discard.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }
23246 
  // Dynamic state: enables/disables depth bias.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }
23253 
  // Dynamic state: sets the framebuffer logic operation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
  }
23260 
  // Dynamic state: enables/disables primitive restart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }
23268 
23269 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
23270  //=== VK_QNX_screen_surface ===
23271 
  // Creates a QNX Screen surface. C-pointer overload: writes the new handle into
  // *pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
                                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                                                                  VULKAN_HPP_NAMESPACE::SurfaceKHR *                       pSurface,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
                                                            reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
                                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                            reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
23284 
23285 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the SurfaceKHR by value; routes the Result
  // through resultCheck (throws on failure when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX(
      m_instance,
      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );

    return createResultValueType( result, surface );
  }
23307 
23308 # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created SurfaceKHR in a UniqueHandle whose
  // deleter destroys it via this Instance with the same allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" );
# endif

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX(
      m_instance,
      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
23331 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
23332 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23333 
  // Queries whether a queue family supports presentation to a QNX Screen window.
  // C-pointer overload taking the window by pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t               queueFamilyIndex,
                                                                            struct _screen_window * window,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  }
23342 
23343 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23344  template <typename Dispatch>
23346  PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23347  {
23348  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23349 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23350  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX &&
23351  "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" );
23352 # endif
23353 
23354  VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
23355 
23356  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
23357  }
23358 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23359 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
23360 
23361  //=== VK_EXT_color_write_enable ===
23362 
  // Dynamic state (VK_EXT_color_write_enable): sets per-attachment color write
  // enables. C-pointer overload with an explicit count.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t                             attachmentCount,
                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
  }
23371 
23372 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the ArrayProxy carries both pointer and count.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" );
# endif

    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  }
23384 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23385 
23386  //=== VK_KHR_ray_tracing_maintenance1 ===
23387 
  // Records an indirect ray-trace whose full parameter block lives at the given
  // device address (VK_KHR_ray_tracing_maintenance1).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
23395 
23396  //=== VK_EXT_multi_draw ===
23397 
  // Records multiple non-indexed draws in one call (VK_EXT_multi_draw).
  // C-pointer overload with explicit count and stride between MultiDrawInfoEXT entries.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t                                     drawCount,
                                                      const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
                                                      uint32_t                                     instanceCount,
                                                      uint32_t                                     firstInstance,
                                                      uint32_t                                     stride,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
  }
23409 
23410 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: StridedArrayProxy supplies count, data pointer, and
  // element stride together.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
                                                      uint32_t instanceCount,
                                                      uint32_t firstInstance,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" );
# endif

    d.vkCmdDrawMultiEXT( m_commandBuffer,
                         vertexInfo.size(),
                         reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
                         instanceCount,
                         firstInstance,
                         vertexInfo.stride() );
  }
23429 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23430 
  // Records multiple indexed draws in one call (VK_EXT_multi_draw).
  // pVertexOffset may be null; if non-null it overrides the per-draw vertex offset.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t                                            drawCount,
                                                             const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
                                                             uint32_t                                            instanceCount,
                                                             uint32_t                                            firstInstance,
                                                             uint32_t                                            stride,
                                                             const int32_t *                                     pVertexOffset,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiIndexedEXT(
      m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
  }
23444 
23445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: Optional<const int32_t> maps to a possibly-null
  // vertex-offset pointer for the C call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
                                        uint32_t                instanceCount,
                                        uint32_t                firstInstance,
                                        Optional<const int32_t> vertexOffset,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // Null function pointer means the required extension was not enabled/loaded.
    VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" );
# endif

    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
                                indexInfo.size(),
                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
                                instanceCount,
                                firstInstance,
                                indexInfo.stride(),
                                static_cast<const int32_t *>( vertexOffset ) );
  }
23467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23468 
23469  //=== VK_EXT_opacity_micromap ===
23470 
  // Creates an opacity micromap object (VK_EXT_opacity_micromap).
  // C-pointer overload: writes the new handle into *pMicromap and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::MicromapEXT *                 pMicromap,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
                                                       reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
  }
23483 
23484 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23485  template <typename Dispatch>
23487  Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
23489  Dispatch const & d ) const
23490  {
23491  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23492 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23493  VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23494 # endif
23495 
23496  VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
23498  d.vkCreateMicromapEXT( m_device,
23499  reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
23500  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
23501  reinterpret_cast<VkMicromapEXT *>( &micromap ) ) );
23502  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
23503 
23504  return createResultValueType( result, micromap );
23505  }
23506 
23507 # ifndef VULKAN_HPP_NO_SMART_HANDLE
23508  template <typename Dispatch>
23510  Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
23512  Dispatch const & d ) const
23513  {
23514  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23515 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23516  VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23517 # endif
23518 
23519  VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
23521  d.vkCreateMicromapEXT( m_device,
23522  reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
23523  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
23524  reinterpret_cast<VkMicromapEXT *>( &micromap ) ) );
23525  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
23526 
23527  return createResultValueType(
23529  }
23530 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
23531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23532 
  // Destroys a micromap object. C-pointer overload; pAllocator may be null.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT                 micromap,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
23541 
23542 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23543  template <typename Dispatch>
23544  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
23546  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23547  {
23548  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23549 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23550  VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23551 # endif
23552 
23553  d.vkDestroyMicromapEXT( m_device,
23554  static_cast<VkMicromapEXT>( micromap ),
23555  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
23556  }
23557 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23558 
  // Generic destroy() overload for MicromapEXT handles; identical to
  // destroyMicromapEXT (used by UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT                 micromap,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
23567 
23568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23569  template <typename Dispatch>
23570  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
23572  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23573  {
23574  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23575 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23576  VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23577 # endif
23578 
23579  d.vkDestroyMicromapEXT( m_device,
23580  static_cast<VkMicromapEXT>( micromap ),
23581  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
23582  }
23583 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23584 
  // Records micromap build commands into this command buffer. C-pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t                                          infoCount,
                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the Vulkan header version this code was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
  }
23593 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: count and pointer are derived from the ArrayProxy container.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
# endif

    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23607 
  // Host-side micromap build, raw-pointer interface; may run as part of deferredOperation.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                           uint32_t infoCount,
                                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBuildMicromapsEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
  }
23618 
23619 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23620  template <typename Dispatch>
23622  Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
23623  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
23624  Dispatch const & d ) const
23625  {
23626  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23627 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23628  VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
23629 # endif
23630 
23631  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBuildMicromapsEXT(
23632  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) );
23633  resultCheck(
23634  result,
23635  VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
23636  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
23637 
23638  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
23639  }
23640 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23641 
  // Host-side micromap copy, raw-pointer interface; may run as part of deferredOperation.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                         const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
  }
23651 
23652 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23653  template <typename Dispatch>
23654  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
23655  const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
23656  Dispatch const & d ) const
23657  {
23658  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23659 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23660  VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23661 # endif
23662 
23664  d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) );
23665  resultCheck(
23666  result,
23667  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
23668  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
23669 
23670  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
23671  }
23672 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23673 
  // Serialize a micromap into host memory, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                                 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
  }
23683 
23684 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23685  template <typename Dispatch>
23687  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
23688  {
23689  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23690 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23691  VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
23692 # endif
23693 
23694  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMicromapToMemoryEXT(
23695  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) );
23696  resultCheck(
23697  result,
23698  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
23699  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
23700 
23701  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
23702  }
23703 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23704 
  // Deserialize a micromap from host memory, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                                                 const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
  }
23714 
23715 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23716  template <typename Dispatch>
23718  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
23719  {
23720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23721 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23722  VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
23723 # endif
23724 
23725  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToMicromapEXT(
23726  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) );
23727  resultCheck(
23728  result,
23729  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
23730  { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
23731 
23732  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
23733  }
23734 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23735 
23736  template <typename Dispatch>
23737  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount,
23738  const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
23740  size_t dataSize,
23741  void * pData,
23742  size_t stride,
23743  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23744  {
23745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23746  return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
23747  m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
23748  }
23749 
23750 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23751  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
23753  Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
23755  size_t dataSize,
23756  size_t stride,
23757  Dispatch const & d ) const
23758  {
23759  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23760 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23761  VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
23762 # endif
23763 
23764  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
23765  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
23767  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device,
23768  micromaps.size(),
23769  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
23770  static_cast<VkQueryType>( queryType ),
23771  data.size() * sizeof( DataType ),
23772  reinterpret_cast<void *>( data.data() ),
23773  stride ) );
23774  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
23775 
23776  return createResultValueType( result, data );
23777  }
23778 
23779  template <typename DataType, typename Dispatch>
23781  Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
23783  size_t stride,
23784  Dispatch const & d ) const
23785  {
23786  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23787 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23788  VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
23789 # endif
23790 
23791  DataType data;
23793  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device,
23794  micromaps.size(),
23795  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
23796  static_cast<VkQueryType>( queryType ),
23797  sizeof( DataType ),
23798  reinterpret_cast<void *>( &data ),
23799  stride ) );
23800  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
23801 
23802  return createResultValueType( result, data );
23803  }
23804 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23805 
  // Record a micromap copy command, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
  }
23812 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the copy info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
# endif

    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23825 
  // Record a micromap-to-memory (serialize) command, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
  }
23833 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the copy info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
# endif

    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23847 
  // Record a memory-to-micromap (deserialize) command, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
  }
23855 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the copy info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
# endif

    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23869 
23870  template <typename Dispatch>
23871  VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount,
23872  const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
23874  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
23875  uint32_t firstQuery,
23876  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23877  {
23878  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23879  d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
23880  micromapCount,
23881  reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
23882  static_cast<VkQueryType>( queryType ),
23883  static_cast<VkQueryPool>( queryPool ),
23884  firstQuery );
23885  }
23886 
23887 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23888  template <typename Dispatch>
23889  VULKAN_HPP_INLINE void
23890  CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
23892  VULKAN_HPP_NAMESPACE::QueryPool queryPool,
23893  uint32_t firstQuery,
23894  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23895  {
23896  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23897 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23898  VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
23899 # endif
23900 
23901  d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
23902  micromaps.size(),
23903  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
23904  static_cast<VkQueryType>( queryType ),
23905  static_cast<VkQueryPool>( queryPool ),
23906  firstQuery );
23907  }
23908 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23909 
23910  template <typename Dispatch>
23911  VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
23913  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23914  {
23915  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23916  d.vkGetDeviceMicromapCompatibilityEXT( m_device,
23917  reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
23918  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
23919  }
23920 
23921 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
23922  template <typename Dispatch>
23924  Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23925  {
23926  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23927 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
23928  VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" );
23929 # endif
23930 
23932  d.vkGetDeviceMicromapCompatibilityEXT( m_device,
23933  reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
23934  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
23935 
23936  return compatibility;
23937  }
23938 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23939 
  // Query the sizes required to build a micromap, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
                                                           VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetMicromapBuildSizesEXT( m_device,
                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
  }
23952 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the size info by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
    Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                      const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" );
# endif

    VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
    d.vkGetMicromapBuildSizesEXT( m_device,
                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );

    return sizeInfo;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
23974 
23975  //=== VK_HUAWEI_cluster_culling_shader ===
23976 
  // Record a cluster-culling-shader draw with the given workgroup counts.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }
23984 
23985  template <typename Dispatch>
23986  VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer,
23988  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
23989  {
23990  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
23991  d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
23992  }
23993 
23994  //=== VK_EXT_pageable_device_local_memory ===
23995 
  // Set the pageable-memory priority of an existing allocation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
  }
24002 
24003  //=== VK_KHR_maintenance4 ===
24004 
  // Query buffer memory requirements from a creation-info description, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
24014 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR &&
                       "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }
24032 
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-specified pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR &&
                       "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // The chain's MemoryRequirements2 element is filled in place; its pNext links the other elements.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24051 
  // Query image memory requirements from a creation-info description, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
24061 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }
24079 
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-specified pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // The chain's MemoryRequirements2 element is filled in place; its pNext links the other elements.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24098 
  // Query sparse image memory requirements, raw-pointer count/array enumeration interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                      uint32_t * pSparseMemoryRequirementCount,
                                                                      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
                                                   pSparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }
24111 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: standard two-call enumeration — first query the count, then fill the vector.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    // First call: query only the element count (null output array).
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the vector.
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
24140 
  // Allocator-aware enhanced-mode overload: same two-call enumeration, but the result vector is
  // constructed with a caller-provided allocator. The enable_if constrains the allocator's value_type.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
                                                 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR &&
                       "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
# endif

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    // First call: query only the element count (null output array).
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the vector.
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24176 
24177  //=== VK_VALVE_descriptor_set_host_mapping ===
24178 
  // Query host-mapping info for a descriptor-set layout binding, raw-pointer interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
                                                                             VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
  }
24189 
24190 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the host-mapping info by value instead of via an output pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
    Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    // With the dynamic dispatcher the entry point may be null if the extension was not enabled.
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE &&
                       "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
#  endif

    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );

    return hostMapping;
  }
24209 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24210 
  // Retrieves the host memory mapping of a descriptor set into *ppData
  // (VK_VALVE_descriptor_set_host_mapping). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
  }
24218 
24219 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the descriptor set's host mapping pointer by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE &&
                       "Function <vkGetDescriptorSetHostMappingVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
#  endif

    void * pData;
    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );

    return pData;
  }
24235 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24236 
24237  //=== VK_NV_copy_memory_indirect ===
24238 
  // Records an indirect memory copy: copyCount VkCopyMemoryIndirectCommandNV records are read
  // from device address copyBufferAddress, 'stride' bytes apart (VK_NV_copy_memory_indirect).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
                                                              uint32_t copyCount,
                                                              uint32_t stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
  }
24248 
  // Records an indirect memory-to-image copy into dstImage (expected in dstImageLayout).
  // pImageSubresources must point to copyCount subresource descriptions, one per copy record.
  // C-style overload of VK_NV_copy_memory_indirect.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
                                                                     uint32_t copyCount,
                                                                     uint32_t stride,
                                                                     VULKAN_HPP_NAMESPACE::Image dstImage,
                                                                     VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                                     const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
                                        static_cast<VkDeviceAddress>( copyBufferAddress ),
                                        copyCount,
                                        stride,
                                        static_cast<VkImage>( dstImage ),
                                        static_cast<VkImageLayout>( dstImageLayout ),
                                        reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
  }
24267 
24268 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the copy count is derived from imageSubresources.size(), so the
  // explicit copyCount parameter of the C-style overload is not needed.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
                                                uint32_t stride,
                                                VULKAN_HPP_NAMESPACE::Image dstImage,
                                                VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" );
#  endif

    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
                                        static_cast<VkDeviceAddress>( copyBufferAddress ),
                                        imageSubresources.size(),
                                        stride,
                                        static_cast<VkImage>( dstImage ),
                                        static_cast<VkImageLayout>( dstImageLayout ),
                                        reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
  }
24291 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24292 
24293  //=== VK_NV_memory_decompression ===
24294 
  // Records decompression of decompressRegionCount memory regions (VK_NV_memory_decompression).
  // C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount,
                                                            const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
  }
24303 
24304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: region count and pointer are taken from the ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" );
#  endif

    d.vkCmdDecompressMemoryNV(
      m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
  }
24318 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24319 
  // Records an indirect-count memory decompression: region records are read from
  // indirectCommandsAddress ('stride' bytes apart) and the region count is read from
  // indirectCommandsCountAddress on the device (VK_NV_memory_decompression).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
                                                                         VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
                                                                         uint32_t stride,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDecompressMemoryIndirectCountNV(
      m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
  }
24330 
24331  //=== VK_NV_device_generated_commands_compute ===
24332 
  // Queries memory requirements for an indirectly-bound compute pipeline described by
  // *pCreateInfo (VK_NV_device_generated_commands_compute). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,
                                                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPipelineIndirectMemoryRequirementsNV(
      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
24342 
24343 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV &&
                       "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
#  endif

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetPipelineIndirectMemoryRequirementsNV(
      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }
24361 
  // StructureChain overload: the driver writes into the MemoryRequirements2 link of the
  // caller-specified chain, so extension structs chained behind it are filled as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV &&
                       "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
#  endif

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetPipelineIndirectMemoryRequirementsNV(
      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
24380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24381 
  // Records an update of the pipeline's indirect (device-side) buffer for the given bind point
  // (VK_NV_device_generated_commands_compute).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                                        VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  }
24390 
  // Returns the device address of a pipeline's indirect data described by *pInfo
  // (VK_NV_device_generated_commands_compute). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceAddress>(
      d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
  }
24399 
24400 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
24401  template <typename Dispatch>
24403  Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24404  {
24405  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24406 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24407  VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV &&
24408  "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" );
24409 # endif
24410 
24411  VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
24412 
24413  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
24414  }
24415 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24416 
24417  //=== VK_EXT_extended_dynamic_state3 ===
24418 
  // Sets the dynamic tessellation domain origin (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
  }
24426 
  // Dynamically enables/disables depth clamping (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
  }
24433 
  // Sets the dynamic polygon rasterization mode (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
  }
24440 
  // Sets the dynamic rasterization sample count (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
  }
24448 
  // Sets the dynamic sample mask for the given sample count. pSampleMask must point to enough
  // 32-bit words to cover 'samples' bits (see the size check in the enhanced overload).
  // C-style overload of VK_EXT_extended_dynamic_state3 / VK_EXT_shader_object.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                          const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
  }
24457 
24458 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: validates that sampleMask supplies exactly ceil(samples / 32) 32-bit
  // words — one bit per sample. With exceptions disabled this is an assert; otherwise a
  // LogicError is thrown (hence NOEXCEPT_WHEN_NO_EXCEPTIONS).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 );
#  else
    if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
  }
24479 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24480 
  // Dynamically enables/disables alpha-to-coverage (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
  }
24488 
  // Dynamically enables/disables alpha-to-one (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
  }
24495 
  // Dynamically enables/disables logic-op blending (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
  }
24502 
  // Sets per-attachment blend enables for attachments [firstAttachment,
  // firstAttachment + attachmentCount). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
                                                                uint32_t attachmentCount,
                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
  }
24512 
24513 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: attachment count is derived from colorBlendEnables.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
                                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT &&
                       "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
  }
24527 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24528 
  // Sets per-attachment blend equations for attachments [firstAttachment,
  // firstAttachment + attachmentCount). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
                                                                  uint32_t attachmentCount,
                                                                  const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorBlendEquationEXT(
      m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
  }
24539 
24540 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: attachment count is derived from colorBlendEquations.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT &&
                       "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetColorBlendEquationEXT(
      m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
  }
24556 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24557 
  // Sets per-attachment color write masks for attachments [firstAttachment,
  // firstAttachment + attachmentCount). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
                                                              uint32_t attachmentCount,
                                                              const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
  }
24567 
24568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: attachment count is derived from colorWriteMasks.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT &&
                       "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetColorWriteMaskEXT(
      m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
  }
24584 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24585 
  // Selects the dynamic transform-feedback rasterization stream (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
  }
24592 
  // Sets the dynamic conservative rasterization mode (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
  }
24601 
  // Sets the dynamic extra primitive overestimation size, in pixels (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
  }
24609 
  // Dynamically enables/disables depth clipping (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
  }
24616 
  // Dynamically enables/disables custom sample locations (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
  }
24624 
  // Sets per-attachment advanced blend state for attachments [firstAttachment,
  // firstAttachment + attachmentCount). C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
                                                                  uint32_t attachmentCount,
                                                                  const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorBlendAdvancedEXT(
      m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
  }
24635 
24636 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: attachment count is derived from colorBlendAdvanced.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT &&
                       "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetColorBlendAdvancedEXT(
      m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
  }
24652 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24653 
  // Sets the dynamic provoking vertex mode (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
  }
24661 
  // Sets the dynamic line rasterization mode (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
  }
24669 
  // Dynamically enables/disables stippled line rasterization (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
  }
24676 
  // Dynamically selects the [-1, 1] vs [0, 1] depth clip range (VK_EXT_extended_dynamic_state3).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
  }
24684 
  // Dynamically enables/disables viewport W-scaling (VK_EXT_extended_dynamic_state3, NV state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
  }
24692 
  // Sets viewport swizzles for viewports [firstViewport, firstViewport + viewportCount).
  // C-style overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
                                                              uint32_t viewportCount,
                                                              const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
  }
24702 
24703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: viewport count is derived from viewportSwizzles.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV &&
                       "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetViewportSwizzleNV(
      m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
  }
24719 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24720 
  // Dynamically enables/disables coverage-to-color (VK_EXT_extended_dynamic_state3, NV state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
  }
24728 
  // Sets the color attachment location receiving coverage-to-color output.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
  }
24735 
  // Sets the dynamic coverage modulation mode (VK_EXT_extended_dynamic_state3, NV state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
  }
24743 
  // Dynamically enables/disables use of the coverage modulation table.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
  }
24751 
  // Sets the coverage modulation table (coverageModulationTableCount float factors).
  // C-style overload; no reinterpret_cast needed since the elements are plain floats.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
                                                                      const float * pCoverageModulationTable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
  }
24760 
24761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: table size is derived from coverageModulationTable.size().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
    VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV &&
                       "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
#  endif

    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
  }
24774 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24775 
  // Dynamically enables/disables shading-rate-image usage (VK_EXT_extended_dynamic_state3, NV state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
  }
24783 
24784  template <typename Dispatch>
24785  VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
24786  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24787  {
24788  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24789  d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
24790  }
24791 
24792  template <typename Dispatch>
24793  VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
24794  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24795  {
24796  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24797  d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
24798  }
24799 
24800  //=== VK_EXT_shader_module_identifier ===
24801 
24802  template <typename Dispatch>
24803  VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
24804  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
24805  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24806  {
24807  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24808  d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
24809  }
24810 
24811 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
24812  template <typename Dispatch>
24813  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
24814  Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24815  {
24816  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24817 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24818  VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );
24819 # endif
24820 
24821  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
24822  d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
24823 
24824  return identifier;
24825  }
24826 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24827 
24828  template <typename Dispatch>
24829  VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
24830  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
24831  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24832  {
24833  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24834  d.vkGetShaderModuleCreateInfoIdentifierEXT(
24835  m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
24836  }
24837 
24838 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
24839  template <typename Dispatch>
24840  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
24841  Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
24842  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24843  {
24844  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24845 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24846  VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT &&
24847  "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );
24848 # endif
24849 
24850  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
24851  d.vkGetShaderModuleCreateInfoIdentifierEXT(
24852  m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
24853 
24854  return identifier;
24855  }
24856 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24857 
24858  //=== VK_NV_optical_flow ===
24859 
24860  template <typename Dispatch>
24862  PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
24863  uint32_t * pFormatCount,
24864  VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
24865  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24866  {
24867  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24868  return static_cast<Result>(
24869  d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
24870  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
24871  pFormatCount,
24872  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
24873  }
24874 
24875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
24876  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
24879  PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
24880  Dispatch const & d ) const
24881  {
24882  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24883 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24884  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
24885  "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" );
24886 # endif
24887 
24888  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
24889  uint32_t formatCount;
24891  do
24892  {
24893  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
24894  m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) );
24895  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount )
24896  {
24897  imageFormatProperties.resize( formatCount );
24898  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
24899  d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
24900  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
24901  &formatCount,
24902  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) );
24903  }
24904  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
24905  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
24906  VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
24907  if ( formatCount < imageFormatProperties.size() )
24908  {
24909  imageFormatProperties.resize( formatCount );
24910  }
24911  return createResultValueType( result, imageFormatProperties );
24912  }
24913 
24914  template <typename OpticalFlowImageFormatPropertiesNVAllocator,
24915  typename Dispatch,
24916  typename std::enable_if<
24917  std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
24918  int>::type>
24921  PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
24922  OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
24923  Dispatch const & d ) const
24924  {
24925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24926 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24927  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
24928  "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" );
24929 # endif
24930 
24931  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
24932  opticalFlowImageFormatPropertiesNVAllocator );
24933  uint32_t formatCount;
24935  do
24936  {
24937  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
24938  m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) );
24939  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount )
24940  {
24941  imageFormatProperties.resize( formatCount );
24942  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
24943  d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
24944  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
24945  &formatCount,
24946  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) );
24947  }
24948  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
24949  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
24950  VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
24951  if ( formatCount < imageFormatProperties.size() )
24952  {
24953  imageFormatProperties.resize( formatCount );
24954  }
24955  return createResultValueType( result, imageFormatProperties );
24956  }
24957 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
24958 
24959  template <typename Dispatch>
24960  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
24961  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
24962  VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,
24963  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
24964  {
24965  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24966  return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
24967  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
24968  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
24969  reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
24970  }
24971 
24972 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
24973  template <typename Dispatch>
24975  Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
24977  Dispatch const & d ) const
24978  {
24979  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
24980 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
24981  VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" );
24982 # endif
24983 
24984  VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
24985  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV(
24986  m_device,
24987  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
24988  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
24989  reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) );
24990  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
24991 
24992  return createResultValueType( result, session );
24993  }
24994 
24995 # ifndef VULKAN_HPP_NO_SMART_HANDLE
24996  template <typename Dispatch>
24998  Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
25000  Dispatch const & d ) const
25001  {
25002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25003 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25004  VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" );
25005 # endif
25006 
25007  VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
25008  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV(
25009  m_device,
25010  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
25011  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25012  reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) );
25013  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
25014 
25015  return createResultValueType(
25017  }
25018 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
25019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25020 
25021  template <typename Dispatch>
25022  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25023  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
25024  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25025  {
25026  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25027  d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
25028  }
25029 
25030 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25031  template <typename Dispatch>
25032  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25034  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25035  {
25036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25037 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25038  VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" );
25039 # endif
25040 
25041  d.vkDestroyOpticalFlowSessionNV(
25042  m_device,
25043  static_cast<VkOpticalFlowSessionNV>( session ),
25044  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
25045  }
25046 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25047 
25048  template <typename Dispatch>
25049  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25050  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
25051  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25052  {
25053  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25054  d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
25055  }
25056 
25057 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25058  template <typename Dispatch>
25059  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25061  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25062  {
25063  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25064 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25065  VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" );
25066 # endif
25067 
25068  d.vkDestroyOpticalFlowSessionNV(
25069  m_device,
25070  static_cast<VkOpticalFlowSessionNV>( session ),
25071  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
25072  }
25073 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25074 
25075 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
25076  template <typename Dispatch>
25077  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25079  VULKAN_HPP_NAMESPACE::ImageView view,
25081  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25082  {
25083  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25084  return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
25085  static_cast<VkOpticalFlowSessionNV>( session ),
25086  static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
25087  static_cast<VkImageView>( view ),
25088  static_cast<VkImageLayout>( layout ) ) );
25089  }
25090 #else
25091  template <typename Dispatch>
25092  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
25093  Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25095  VULKAN_HPP_NAMESPACE::ImageView view,
25097  Dispatch const & d ) const
25098  {
25099  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25100 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25101  VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" );
25102 # endif
25103 
25105  static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
25106  static_cast<VkOpticalFlowSessionNV>( session ),
25107  static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
25108  static_cast<VkImageView>( view ),
25109  static_cast<VkImageLayout>( layout ) ) );
25110  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
25111 
25112  return createResultValueType( result );
25113  }
25114 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
25115 
25116  template <typename Dispatch>
25117  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25118  const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
25119  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25120  {
25121  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25122  d.vkCmdOpticalFlowExecuteNV(
25123  m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
25124  }
25125 
25126 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25127  template <typename Dispatch>
25128  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
25129  const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
25130  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25131  {
25132  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25133 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25134  VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" );
25135 # endif
25136 
25137  d.vkCmdOpticalFlowExecuteNV(
25138  m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
25139  }
25140 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25141 
25142  //=== VK_KHR_maintenance5 ===
25143 
25144  template <typename Dispatch>
25145  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
25149  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25150  {
25151  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25152  d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
25153  static_cast<VkBuffer>( buffer ),
25154  static_cast<VkDeviceSize>( offset ),
25155  static_cast<VkDeviceSize>( size ),
25156  static_cast<VkIndexType>( indexType ) );
25157  }
25158 
25159  template <typename Dispatch>
25160  VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
25161  VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
25162  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25163  {
25164  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25165  d.vkGetRenderingAreaGranularityKHR(
25166  m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
25167  }
25168 
25169 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25170  template <typename Dispatch>
25171  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
25172  Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25173  {
25174  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25175 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25176  VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" );
25177 # endif
25178 
25179  VULKAN_HPP_NAMESPACE::Extent2D granularity;
25180  d.vkGetRenderingAreaGranularityKHR(
25181  m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );
25182 
25183  return granularity;
25184  }
25185 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25186 
25187  template <typename Dispatch>
25188  VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
25189  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
25190  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25191  {
25192  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25193  d.vkGetDeviceImageSubresourceLayoutKHR(
25194  m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
25195  }
25196 
25197 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25198  template <typename Dispatch>
25199  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
25200  Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25201  {
25202  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25203 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25204  VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
25205 # endif
25206 
25207  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
25208  d.vkGetDeviceImageSubresourceLayoutKHR(
25209  m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
25210 
25211  return layout;
25212  }
25213 
25214  template <typename X, typename Y, typename... Z, typename Dispatch>
25215  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
25216  Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25217  {
25218  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25219 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25220  VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
25221 # endif
25222 
25223  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
25224  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
25225  d.vkGetDeviceImageSubresourceLayoutKHR(
25226  m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
25227 
25228  return structureChain;
25229  }
25230 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25231 
25232  template <typename Dispatch>
25233  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
25234  const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
25235  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
25236  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25237  {
25238  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25239  d.vkGetImageSubresourceLayout2KHR( m_device,
25240  static_cast<VkImage>( image ),
25241  reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
25242  reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
25243  }
25244 
25245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25246  template <typename Dispatch>
25247  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR(
25248  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25249  {
25250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25251 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25253  d.vkGetImageSubresourceLayout2KHR &&
25254  "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
25255 # endif
25256 
25257  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
25258  d.vkGetImageSubresourceLayout2KHR( m_device,
25259  static_cast<VkImage>( image ),
25260  reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
25261  reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
25262 
25263  return layout;
25264  }
25265 
25266  template <typename X, typename Y, typename... Z, typename Dispatch>
25267  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
25268  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25269  {
25270  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25271 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25273  d.vkGetImageSubresourceLayout2KHR &&
25274  "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
25275 # endif
25276 
25277  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
25278  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
25279  d.vkGetImageSubresourceLayout2KHR( m_device,
25280  static_cast<VkImage>( image ),
25281  reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
25282  reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
25283 
25284  return structureChain;
25285  }
25286 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25287 
25288  //=== VK_EXT_shader_object ===
25289 
25290  template <typename Dispatch>
25291  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
25292  const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
25293  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
25294  VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
25295  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25296  {
25297  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25298  return static_cast<Result>( d.vkCreateShadersEXT( m_device,
25299  createInfoCount,
25300  reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
25301  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
25302  reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
25303  }
25304 
25305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25306  template <typename ShaderEXTAllocator, typename Dispatch>
25308  Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
25310  Dispatch const & d ) const
25311  {
25312  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25313 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25314  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25315 # endif
25316 
25317  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
25319  d.vkCreateShadersEXT( m_device,
25320  createInfos.size(),
25321  reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
25322  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25323  reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
25324  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
25325 
25326  return createResultValueType( result, shaders );
25327  }
25328 
25329  template <typename ShaderEXTAllocator,
25330  typename Dispatch,
25331  typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type>
25333  Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
25335  ShaderEXTAllocator & shaderEXTAllocator,
25336  Dispatch const & d ) const
25337  {
25338  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25339 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25340  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25341 # endif
25342 
25343  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator );
25345  d.vkCreateShadersEXT( m_device,
25346  createInfos.size(),
25347  reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
25348  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25349  reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
25350  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
25351 
25352  return createResultValueType( result, shaders );
25353  }
25354 
25355  template <typename Dispatch>
25357  Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
25359  Dispatch const & d ) const
25360  {
25361  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25362 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25363  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25364 # endif
25365 
25366  VULKAN_HPP_NAMESPACE::ShaderEXT shader;
25368  d.vkCreateShadersEXT( m_device,
25369  1,
25370  reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
25371  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25372  reinterpret_cast<VkShaderEXT *>( &shader ) ) );
25373  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" );
25374 
25375  return createResultValueType( result, shader );
25376  }
25377 
25378 # ifndef VULKAN_HPP_NO_SMART_HANDLE
25379  template <typename Dispatch, typename ShaderEXTAllocator>
25382  Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
25384  Dispatch const & d ) const
25385  {
25386  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25387 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25388  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25389 # endif
25390 
25391  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
25393  d.vkCreateShadersEXT( m_device,
25394  createInfos.size(),
25395  reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
25396  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25397  reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
25398  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
25399  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
25400  uniqueShaders.reserve( createInfos.size() );
25401  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
25402  for ( auto const & shader : shaders )
25403  {
25404  uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
25405  }
25406  return createResultValueType( result, std::move( uniqueShaders ) );
25407  }
25408 
25409  template <
25410  typename Dispatch,
25411  typename ShaderEXTAllocator,
25412  typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type>
25415  Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
25417  ShaderEXTAllocator & shaderEXTAllocator,
25418  Dispatch const & d ) const
25419  {
25420  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25421 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25422  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25423 # endif
25424 
25425  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
25427  d.vkCreateShadersEXT( m_device,
25428  createInfos.size(),
25429  reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
25430  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25431  reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
25432  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
25433  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
25434  uniqueShaders.reserve( createInfos.size() );
25435  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
25436  for ( auto const & shader : shaders )
25437  {
25438  uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
25439  }
25440  return createResultValueType( result, std::move( uniqueShaders ) );
25441  }
25442 
25443  template <typename Dispatch>
25445  Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
25447  Dispatch const & d ) const
25448  {
25449  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25450 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25451  VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" );
25452 # endif
25453 
25454  VULKAN_HPP_NAMESPACE::ShaderEXT shader;
25456  d.vkCreateShadersEXT( m_device,
25457  1,
25458  reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
25459  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
25460  reinterpret_cast<VkShaderEXT *>( &shader ) ) );
25461  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" );
25462 
25463  return createResultValueType( result,
25465  }
25466 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
25467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25468 
25469  template <typename Dispatch>
25470  VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
25471  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
25472  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25473  {
25474  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25475  d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
25476  }
25477 
25478 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25479  template <typename Dispatch>
25480  VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
25482  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25483  {
25484  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25485 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25486  VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" );
25487 # endif
25488 
25489  d.vkDestroyShaderEXT( m_device,
25490  static_cast<VkShaderEXT>( shader ),
25491  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
25492  }
25493 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25494 
25495  template <typename Dispatch>
25496  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
25497  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
25498  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25499  {
25500  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25501  d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
25502  }
25503 
25504 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25505  template <typename Dispatch>
25506  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
25508  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25509  {
25510  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25511 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25512  VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" );
25513 # endif
25514 
25515  d.vkDestroyShaderEXT( m_device,
25516  static_cast<VkShaderEXT>( shader ),
25517  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
25518  }
25519 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25520 
25521  template <typename Dispatch>
25523  Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25524  {
25525  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25526  return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
25527  }
25528 
25529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25530  template <typename Uint8_tAllocator, typename Dispatch>
25532  Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const
25533  {
25534  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25535 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25536  VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
25537 # endif
25538 
25539  std::vector<uint8_t, Uint8_tAllocator> data;
25540  size_t dataSize;
25542  do
25543  {
25544  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) );
25545  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
25546  {
25547  data.resize( dataSize );
25548  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
25549  d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
25550  }
25551  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25552  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
25553  VULKAN_HPP_ASSERT( dataSize <= data.size() );
25554  if ( dataSize < data.size() )
25555  {
25556  data.resize( dataSize );
25557  }
25558  return createResultValueType( result, data );
25559  }
25560 
25561  template <typename Uint8_tAllocator,
25562  typename Dispatch,
25563  typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
25565  Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
25566  {
25567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25568 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25569  VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
25570 # endif
25571 
25572  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
25573  size_t dataSize;
25575  do
25576  {
25577  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) );
25578  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
25579  {
25580  data.resize( dataSize );
25581  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
25582  d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
25583  }
25584  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25585  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
25586  VULKAN_HPP_ASSERT( dataSize <= data.size() );
25587  if ( dataSize < data.size() )
25588  {
25589  data.resize( dataSize );
25590  }
25591  return createResultValueType( result, data );
25592  }
25593 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25594 
25595  template <typename Dispatch>
25596  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount,
25598  const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
25599  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25600  {
25601  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25602  d.vkCmdBindShadersEXT(
25603  m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
25604  }
25605 
25606 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25607  template <typename Dispatch>
25608  VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
25609  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
25610  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
25611  {
25612  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25613 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25614  VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
25615 # endif
25616 # ifdef VULKAN_HPP_NO_EXCEPTIONS
25617  VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
25618 # else
25619  if ( stages.size() != shaders.size() )
25620  {
25621  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
25622  }
25623 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
25624 
25625  d.vkCmdBindShadersEXT( m_commandBuffer,
25626  stages.size(),
25627  reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
25628  reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
25629  }
25630 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25631 
25632  //=== VK_QCOM_tile_properties ===
25633 
25634  template <typename Dispatch>
25635  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
25636  uint32_t * pPropertiesCount,
25637  VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
25638  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25639  {
25640  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25641  return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
25642  m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
25643  }
25644 
25645 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25646  template <typename TilePropertiesQCOMAllocator, typename Dispatch>
25648  Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
25649  {
25650  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25651 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25652  VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
25653 # endif
25654 
25655  std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
25656  uint32_t propertiesCount;
25658  do
25659  {
25660  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
25661  d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) );
25662  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount )
25663  {
25664  properties.resize( propertiesCount );
25665  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM(
25666  m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) );
25667  }
25668  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25669 
25670  VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
25671  if ( propertiesCount < properties.size() )
25672  {
25673  properties.resize( propertiesCount );
25674  }
25675  return properties;
25676  }
25677 
25678  template <typename TilePropertiesQCOMAllocator,
25679  typename Dispatch,
25680  typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type>
25682  Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
25683  TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
25684  Dispatch const & d ) const
25685  {
25686  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25687 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25688  VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
25689 # endif
25690 
25691  std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
25692  uint32_t propertiesCount;
25694  do
25695  {
25696  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
25697  d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) );
25698  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount )
25699  {
25700  properties.resize( propertiesCount );
25701  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM(
25702  m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) );
25703  }
25704  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25705 
25706  VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
25707  if ( propertiesCount < properties.size() )
25708  {
25709  properties.resize( propertiesCount );
25710  }
25711  return properties;
25712  }
25713 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25714 
25715  template <typename Dispatch>
25716  VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
25717  VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
25718  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25719  {
25720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25721  return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
25722  m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
25723  }
25724 
25725 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25726  template <typename Dispatch>
25727  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
25728  Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25729  {
25730  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25731 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25732  VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
25733 # endif
25734 
25735  VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
25736  d.vkGetDynamicRenderingTilePropertiesQCOM(
25737  m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
25738 
25739  return properties;
25740  }
25741 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25742 
25743  //=== VK_NV_low_latency2 ===
25744 
25745  template <typename Dispatch>
25746  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25747  const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,
25748  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25749  {
25750  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25751  return static_cast<Result>(
25752  d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) );
25753  }
25754 
25755 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25756  template <typename Dispatch>
25757  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25758  const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,
25759  Dispatch const & d ) const
25760  {
25761  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25762 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25763  VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" );
25764 # endif
25765 
25767  d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) );
25768  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" );
25769 
25770  return createResultValueType( result );
25771  }
25772 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25773 
25774  template <typename Dispatch>
25775  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25776  const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,
25777  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25778  {
25779  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25780  return static_cast<Result>(
25781  d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) );
25782  }
25783 
25784 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25785  template <typename Dispatch>
25787  Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, Dispatch const & d ) const
25788  {
25789  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25790 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25791  VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" );
25792 # endif
25793 
25795  d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ) );
25796  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::latencySleepNV" );
25797 
25798  return createResultValueType( result );
25799  }
25800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25801 
25802  template <typename Dispatch>
25803  VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25804  const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,
25805  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25806  {
25807  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25808  d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
25809  }
25810 
25811 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25812  template <typename Dispatch>
25813  VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25814  const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,
25815  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25816  {
25817  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25818 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25819  VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" );
25820 # endif
25821 
25822  d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
25823  }
25824 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25825 
25826  template <typename Dispatch>
25827  VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
25828  VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,
25829  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25830  {
25831  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25832  d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
25833  }
25834 
25835 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25836  template <typename Dispatch>
25837  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV
25838  Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25839  {
25840  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25841 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25842  VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" );
25843 # endif
25844 
25845  VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo;
25846  d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
25847 
25848  return latencyMarkerInfo;
25849  }
25850 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25851 
25852  template <typename Dispatch>
25853  VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,
25854  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25855  {
25856  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25857  d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) );
25858  }
25859 
25860 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25861  template <typename Dispatch>
25862  VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,
25863  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25864  {
25865  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25866 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25867  VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" );
25868 # endif
25869 
25870  d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) );
25871  }
25872 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25873 
25874  //=== VK_KHR_cooperative_matrix ===
25875 
25876  template <typename Dispatch>
25877  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR(
25878  uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25879  {
25880  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25881  return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
25882  m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) );
25883  }
25884 
25885 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25886  template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch>
25889  PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const
25890  {
25891  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25892 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25893  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR &&
25894  "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" );
25895 # endif
25896 
25897  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties;
25898  uint32_t propertyCount;
25900  do
25901  {
25902  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
25903  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
25904  {
25905  properties.resize( propertyCount );
25906  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
25907  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) );
25908  }
25909  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25910  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
25911  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
25912  if ( propertyCount < properties.size() )
25913  {
25914  properties.resize( propertyCount );
25915  }
25916  return createResultValueType( result, properties );
25917  }
25918 
25919  template <typename CooperativeMatrixPropertiesKHRAllocator,
25920  typename Dispatch,
25921  typename std::enable_if<
25922  std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value,
25923  int>::type>
25926  PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,
25927  Dispatch const & d ) const
25928  {
25929  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25930 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25931  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR &&
25932  "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" );
25933 # endif
25934 
25935  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties(
25936  cooperativeMatrixPropertiesKHRAllocator );
25937  uint32_t propertyCount;
25939  do
25940  {
25941  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
25942  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
25943  {
25944  properties.resize( propertyCount );
25945  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
25946  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) );
25947  }
25948  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
25949  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
25950  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
25951  if ( propertyCount < properties.size() )
25952  {
25953  properties.resize( propertyCount );
25954  }
25955  return createResultValueType( result, properties );
25956  }
25957 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
25958 
25959  //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
25960 
25961  template <typename Dispatch>
25962  VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,
25963  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25964  {
25965  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25966  d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) );
25967  }
25968 
25969 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
25970  //=== VK_QNX_external_memory_screen_buffer ===
25971 
25972  template <typename Dispatch>
25973  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer,
25974  VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,
25975  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
25976  {
25977  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25978  return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) );
25979  }
25980 
25981 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
25982  template <typename Dispatch>
25983  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type
25984  Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
25985  {
25986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
25987 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
25988  VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );
25989 # endif
25990 
25991  VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
25993  d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) );
25994  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
25995 
25996  return createResultValueType( result, properties );
25997  }
25998 
25999  template <typename X, typename Y, typename... Z, typename Dispatch>
26000  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
26001  Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
26002  {
26003  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26004 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26005  VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );
26006 # endif
26007 
26008  StructureChain<X, Y, Z...> structureChain;
26009  VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
26011  d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) );
26012  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
26013 
26014  return createResultValueType( result, structureChain );
26015  }
26016 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26017 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
26018 
26019  //=== VK_KHR_calibrated_timestamps ===
26020 
26021  template <typename Dispatch>
26022  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount,
26023  VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,
26024  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26025  {
26026  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26027  return static_cast<Result>(
26028  d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
26029  }
26030 
26031 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26032  template <typename TimeDomainKHRAllocator, typename Dispatch>
26034  PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const
26035  {
26036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26037 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26038  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR &&
26039  "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
26040 # endif
26041 
26042  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains;
26043  uint32_t timeDomainCount;
26045  do
26046  {
26047  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) );
26048  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
26049  {
26050  timeDomains.resize( timeDomainCount );
26051  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
26052  d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
26053  }
26054  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
26055  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" );
26056  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
26057  if ( timeDomainCount < timeDomains.size() )
26058  {
26059  timeDomains.resize( timeDomainCount );
26060  }
26061  return createResultValueType( result, timeDomains );
26062  }
26063 
26064  template <typename TimeDomainKHRAllocator,
26065  typename Dispatch,
26066  typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type>
26068  PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const
26069  {
26070  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26071 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26072  VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR &&
26073  "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
26074 # endif
26075 
26076  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator );
26077  uint32_t timeDomainCount;
26079  do
26080  {
26081  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) );
26082  if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
26083  {
26084  timeDomains.resize( timeDomainCount );
26085  result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
26086  d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
26087  }
26088  } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
26089  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" );
26090  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
26091  if ( timeDomainCount < timeDomains.size() )
26092  {
26093  timeDomains.resize( timeDomainCount );
26094  }
26095  return createResultValueType( result, timeDomains );
26096  }
26097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26098 
26099  template <typename Dispatch>
26100  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount,
26101  const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,
26102  uint64_t * pTimestamps,
26103  uint64_t * pMaxDeviation,
26104  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26105  {
26106  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26107  return static_cast<Result>( d.vkGetCalibratedTimestampsKHR(
26108  m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
26109  }
26110 
26111 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26112  template <typename Uint64_tAllocator, typename Dispatch>
26114  Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
26115  Dispatch const & d ) const
26116  {
26117  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26118 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26119  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR &&
26120  "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
26121 # endif
26122 
26123  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
26124  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
26125  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
26126  uint64_t & maxDeviation = data_.second;
26127  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR(
26128  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
26129  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" );
26130 
26131  return createResultValueType( result, data_ );
26132  }
26133 
26134  template <typename Uint64_tAllocator,
26135  typename Dispatch,
26136  typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type>
26138  Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
26139  Uint64_tAllocator & uint64_tAllocator,
26140  Dispatch const & d ) const
26141  {
26142  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26143 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26144  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR &&
26145  "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
26146 # endif
26147 
26148  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
26149  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
26150  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
26151  uint64_t & maxDeviation = data_.second;
26152  VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR(
26153  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
26154  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" );
26155 
26156  return createResultValueType( result, data_ );
26157  }
26158 
26159  template <typename Dispatch>
26161  Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const
26162  {
26163  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26164 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26165  VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR &&
26166  "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
26167 # endif
26168 
26169  std::pair<uint64_t, uint64_t> data_;
26170  uint64_t & timestamp = data_.first;
26171  uint64_t & maxDeviation = data_.second;
26173  d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) );
26174  resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" );
26175 
26176  return createResultValueType( result, data_ );
26177  }
26178 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26179 
26180  //=== VK_KHR_maintenance6 ===
26181 
26182  template <typename Dispatch>
26183  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo,
26184  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26185  {
26186  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26187  d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) );
26188  }
26189 
26190 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26191  template <typename Dispatch>
26192  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo,
26193  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26194  {
26195  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26196 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26197  VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6>" );
26198 # endif
26199 
26200  d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) );
26201  }
26202 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26203 
26204  template <typename Dispatch>
26205  VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo,
26206  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26207  {
26208  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26209  d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) );
26210  }
26211 
26212 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26213  template <typename Dispatch>
26214  VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo,
26215  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26216  {
26217  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26218 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26219  VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6>" );
26220 # endif
26221 
26222  d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) );
26223  }
26224 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26225 
26226  template <typename Dispatch>
26227  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo,
26228  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26229  {
26230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26231  d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) );
26232  }
26233 
26234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26235  template <typename Dispatch>
26236  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo,
26237  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26238  {
26239  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26240 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26241  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6>" );
26242 # endif
26243 
26244  d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) );
26245  }
26246 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26247 
26248  template <typename Dispatch>
26249  VULKAN_HPP_INLINE void
26250  CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo,
26251  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26252  {
26253  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26254  d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer,
26255  reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) );
26256  }
26257 
26258 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26259  template <typename Dispatch>
26260  VULKAN_HPP_INLINE void
26261  CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo,
26262  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26263  {
26264  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26265 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26266  VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && "Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6>" );
26267 # endif
26268 
26269  d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer,
26270  reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) );
26271  }
26272 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26273 
26274  template <typename Dispatch>
26275  VULKAN_HPP_INLINE void
26276  CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo,
26277  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26278  {
26279  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26280  d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) );
26281  }
26282 
26283 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26284  template <typename Dispatch>
26285  VULKAN_HPP_INLINE void
26286  CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo,
26287  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26288  {
26289  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26290 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26291  VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" );
26292 # endif
26293 
26294  d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) );
26295  }
26296 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26297 
26298  template <typename Dispatch>
26299  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
26300  const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo,
26301  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26302  {
26303  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26304  d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
26305  m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) );
26306  }
26307 
26308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
26309  template <typename Dispatch>
26310  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
26311  const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo,
26312  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
26313  {
26314  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26315 # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
26316  VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT &&
26317  "Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" );
26318 # endif
26319 
26320  d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
26321  m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) );
26322  }
26323 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
26324 
26325 } // namespace VULKAN_HPP_NAMESPACE
26326 #endif
vk::ShaderModule createShaderModule(vk::Device const &device, vk::ShaderStageFlagBits shaderStage, std::string const &shaderText)
Definition: shaders.cpp:88
vk::RenderPass createRenderPass(vk::Device const &device, vk::Format colorFormat, vk::Format depthFormat, vk::AttachmentLoadOp loadOp, vk::ImageLayout colorFinalLayout)
Definition: utils.cpp:314
vk::Pipeline createGraphicsPipeline(vk::Device const &device, vk::PipelineCache const &pipelineCache, std::pair< vk::ShaderModule, vk::SpecializationInfo const * > const &vertexShaderData, std::pair< vk::ShaderModule, vk::SpecializationInfo const * > const &fragmentShaderData, uint32_t vertexStride, std::vector< std::pair< vk::Format, uint32_t >> const &vertexInputAttributeFormatOffset, vk::FrontFace frontFace, bool depthBuffered, vk::PipelineLayout const &pipelineLayout, vk::RenderPass const &renderPass)
Definition: utils.cpp:133
vk::Device createDevice(vk::PhysicalDevice const &physicalDevice, uint32_t queueFamilyIndex, std::vector< std::string > const &extensions, vk::PhysicalDeviceFeatures const *physicalDeviceFeatures, void const *pNext)
Definition: utils.cpp:86
vk::DescriptorSetLayout createDescriptorSetLayout(vk::Device const &device, std::vector< std::tuple< vk::DescriptorType, uint32_t, vk::ShaderStageFlags >> const &bindingData, vk::DescriptorSetLayoutCreateFlags flags)
Definition: utils.cpp:73
void updateDescriptorSets(vk::Device const &device, vk::DescriptorSet const &descriptorSet, std::vector< std::tuple< vk::DescriptorType, vk::Buffer const &, vk::DeviceSize, vk::BufferView const & >> const &bufferData, vk::su::TextureData const &textureData, uint32_t bindingOffset)
Definition: utils.cpp:660
vk::DescriptorPool createDescriptorPool(vk::Device const &device, std::vector< vk::DescriptorPoolSize > const &poolSizes)
Definition: utils.cpp:62
SampleCountFlagBits
Flags< CommandBufferResetFlagBits > CommandBufferResetFlags
SurfaceCounterFlagBitsEXT
uint32_t SampleMask
Definition: vulkan.hpp:6124
Flags< CullModeFlagBits > CullModeFlags
PrimitiveTopology
Flags< DebugUtilsMessageTypeFlagBitsEXT > DebugUtilsMessageTypeFlagsEXT
AccelerationStructureBuildTypeKHR
Flags< ImageCreateFlagBits > ImageCreateFlags
OpticalFlowSessionBindingPointNV
uint64_t DeviceAddress
Definition: vulkan.hpp:6121
Flags< ImageUsageFlagBits > ImageUsageFlags
Flags< ColorComponentFlagBits > ColorComponentFlags
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< uint32_t >::type enumerateInstanceVersion(Dispatch const &d)
Flags< ImageAspectFlagBits > ImageAspectFlags
PipelineBindPoint
Flags< CommandPoolResetFlagBits > CommandPoolResetFlags
DiscardRectangleModeEXT
DescriptorType
Flags< MemoryMapFlagBits > MemoryMapFlags
Flags< QueryControlFlagBits > QueryControlFlags
Flags< ExternalMemoryHandleTypeFlagBitsNV > ExternalMemoryHandleTypeFlagsNV
CoverageModulationModeNV
PerformanceParameterTypeINTEL
Flags< PipelineStageFlagBits2 > PipelineStageFlags2
VULKAN_HPP_INLINE void resultCheck(Result result, char const *message)
Definition: vulkan.hpp:6736
ProvokingVertexModeEXT
LineRasterizationModeEXT
DebugReportObjectTypeEXT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< vk::Instance >::type createInstance(const vk::InstanceCreateInfo &createInfo, Optional< const vk::AllocationCallbacks > allocator, Dispatch const &d)
AccelerationStructureCompatibilityKHR
Flags< DependencyFlagBits > DependencyFlags
Flags< ShaderStageFlagBits > ShaderStageFlags
FragmentShadingRateCombinerOpKHR
Flags< DebugReportFlagBitsEXT > DebugReportFlagsEXT
ShaderStageFlagBits
CoarseSampleOrderTypeNV
DebugUtilsMessageSeverityFlagBitsEXT
Flags< PeerMemoryFeatureFlagBits > PeerMemoryFeatureFlags
TessellationDomainOrigin
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< std::vector< vk::ExtensionProperties, ExtensionPropertiesAllocator > >::type enumerateInstanceExtensionProperties(Optional< const std::string > layerName, ExtensionPropertiesAllocator &extensionPropertiesAllocator, Dispatch const &d)
VULKAN_HPP_INLINE ResultValueType< void >::type createResultValueType(Result result)
Definition: vulkan.hpp:6705
CopyAccelerationStructureModeKHR
ExternalMemoryHandleTypeFlagBits
PipelineStageFlagBits
uint32_t Bool32
Definition: vulkan.hpp:6120
Flags< QueryResultFlagBits > QueryResultFlags
ConservativeRasterizationModeEXT
Flags< PipelineStageFlagBits > PipelineStageFlags
SubpassContents
uint64_t DeviceSize
Definition: vulkan.hpp:6122
FragmentShadingRateNV
CoverageReductionModeNV
MemoryRequirements2 MemoryRequirements2KHR
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< std::vector< vk::LayerProperties, LayerPropertiesAllocator > >::type enumerateInstanceLayerProperties(LayerPropertiesAllocator &layerPropertiesAllocator, Dispatch const &d)
PresentModeKHR
PipelineInfoKHR PipelineInfoEXT
Flags< DeviceGroupPresentModeFlagBitsKHR > DeviceGroupPresentModeFlagsKHR
Flags< CommandPoolTrimFlagBits > CommandPoolTrimFlags
Flags< StencilFaceFlagBits > StencilFaceFlags
void * RemoteAddressNV
Definition: vulkan.hpp:6123
Flags< DescriptorPoolResetFlagBits > DescriptorPoolResetFlags
ShaderInfoTypeAMD
ShaderGroupShaderKHR
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< UniqueHandle< vk::Instance, Dispatch > >::type createInstanceUnique(const vk::InstanceCreateInfo &createInfo, Optional< const vk::AllocationCallbacks > allocator, Dispatch const &d)
module
Definition: vulkan.cppm:11
uint64_t VkVideoSessionParametersKHR
Definition: vulkan_core.h:7946
uint64_t VkImage
Definition: vulkan_core.h:100
VkFlags VkPipelineStageFlags
Definition: vulkan_core.h:2405
uint64_t VkSurfaceKHR
Definition: vulkan_core.h:7513
uint64_t VkShaderModule
Definition: vulkan_core.h:113
uint64_t VkQueryPool
Definition: vulkan_core.h:110
uint64_t VkMicromapEXT
uint64_t VkIndirectCommandsLayoutNV
VkImageLayout
Definition: vulkan_core.h:1282
VkIndexType
Definition: vulkan_core.h:2129
VkShaderStageFlagBits
Definition: vulkan_core.h:2596
uint64_t VkSampler
Definition: vulkan_core.h:119
VkFlags VkImageAspectFlags
Definition: vulkan_core.h:2202
uint64_t VkRenderPass
Definition: vulkan_core.h:117
void * VkRemoteAddressNV
uint64_t VkCommandPool
Definition: vulkan_core.h:123
#define VK_HEADER_VERSION
Definition: vulkan_core.h:72
uint64_t VkPipelineCache
Definition: vulkan_core.h:114
uint32_t VkSampleMask
Definition: vulkan_core.h:98
VkFlags VkMemoryMapFlags
Definition: vulkan_core.h:2406
uint64_t VkEvent
Definition: vulkan_core.h:109
uint64_t VkBuffer
Definition: vulkan_core.h:99
VkFlags VkImageUsageFlags
Definition: vulkan_core.h:2319
VkTessellationDomainOrigin
Definition: vulkan_core.h:4870
VkTimeDomainKHR
uint64_t VkCuModuleNVX
struct VkPhysicalDevice_T * VkPhysicalDevice
Definition: vulkan_core.h:102
VkFlags VkDependencyFlags
Definition: vulkan_core.h:2717
VkCoarseSampleOrderTypeNV
VkShaderGroupShaderKHR
uint64_t VkFramebuffer
Definition: vulkan_core.h:122
uint64_t VkDeviceSize
Definition: vulkan_core.h:96
uint64_t VkBufferView
Definition: vulkan_core.h:111
uint64_t VkCudaModuleNV
struct VkQueue_T * VkQueue
Definition: vulkan_core.h:104
VkFlags VkStencilFaceFlags
Definition: vulkan_core.h:2789
uint64_t VkDisplayKHR
Definition: vulkan_core.h:7789
VkFlags VkExternalMemoryHandleTypeFlagsNV
VkCoverageReductionModeNV
VkImageTiling
Definition: vulkan_core.h:1714
uint64_t VkDisplayModeKHR
Definition: vulkan_core.h:7790
VkPipelineBindPoint
Definition: vulkan_core.h:2111
VkFlags VkCommandPoolTrimFlags
Definition: vulkan_core.h:4952
VkAccelerationStructureCompatibilityKHR
VkShaderInfoTypeAMD
VkStencilOp
Definition: vulkan_core.h:2002
VkOpticalFlowSessionBindingPointNV
struct VkInstance_T * VkInstance
Definition: vulkan_core.h:101
struct VkDevice_T * VkDevice
Definition: vulkan_core.h:103
uint64_t VkDeferredOperationKHR
VkSurfaceCounterFlagBitsEXT
VkDebugReportObjectTypeEXT
uint64_t VkOpticalFlowSessionNV
VkFlags VkPeerMemoryFeatureFlags
Definition: vulkan_core.h:4940
uint64_t VkFence
Definition: vulkan_core.h:107
uint64_t VkDescriptorSet
Definition: vulkan_core.h:120
VkSampleCountFlagBits
Definition: vulkan_core.h:2282
VkExternalMemoryHandleTypeFlagBits
Definition: vulkan_core.h:4955
VkQueryType
Definition: vulkan_core.h:1737
VkFlags VkDebugUtilsMessageTypeFlagsEXT
VkFragmentShadingRateCombinerOpKHR
void(* PFN_vkVoidFunction)(void)
Definition: vulkan_core.h:2925
uint64_t VkDescriptorPool
Definition: vulkan_core.h:121
VkConservativeRasterizationModeEXT
uint64_t VkAccelerationStructureNV
VkFlags VkQueryControlFlags
Definition: vulkan_core.h:2774
uint64_t VkSwapchainKHR
Definition: vulkan_core.h:7628
VkCompareOp
Definition: vulkan_core.h:1864
uint64_t VkPipelineLayout
Definition: vulkan_core.h:115
VkLogicOp
Definition: vulkan_core.h:2014
uint64_t VkShaderEXT
VkFlags VkCommandPoolResetFlags
Definition: vulkan_core.h:2760
VkPerformanceParameterTypeINTEL
VkCoverageModulationModeNV
VkFlags VkShaderStageFlags
Definition: vulkan_core.h:2663
VkFragmentShadingRateNV
VkCopyAccelerationStructureModeKHR
uint64_t VkDescriptorUpdateTemplate
Definition: vulkan_core.h:4857
VkAccelerationStructureBuildTypeKHR
uint64_t VkDescriptorSetLayout
Definition: vulkan_core.h:118
uint64_t VkAccelerationStructureKHR
uint64_t VkCudaFunctionNV
VkImageType
Definition: vulkan_core.h:1721
uint64_t VkSamplerYcbcrConversion
Definition: vulkan_core.h:4856
VkPrimitiveTopology
Definition: vulkan_core.h:1979
VkFlags VkCullModeFlags
Definition: vulkan_core.h:2633
VkFlags VkDeviceGroupPresentModeFlagsKHR
Definition: vulkan_core.h:7648
uint64_t VkPipeline
Definition: vulkan_core.h:116
VkFilter
Definition: vulkan_core.h:2046
VkFlags VkQueryResultFlags
Definition: vulkan_core.h:2464
uint64_t VkDebugUtilsMessengerEXT
VkFlags VkImageCreateFlags
Definition: vulkan_core.h:2280
VkFrontFace
Definition: vulkan_core.h:1967
struct VkCommandBuffer_T * VkCommandBuffer
Definition: vulkan_core.h:106
VkFlags VkDebugReportFlagsEXT
uint64_t VkDebugReportCallbackEXT
uint64_t VkSemaphore
Definition: vulkan_core.h:105
VkPresentModeKHR
Definition: vulkan_core.h:7517
uint64_t VkCuFunctionNVX
VkLineRasterizationModeEXT
VkFormat
Definition: vulkan_core.h:1406
VkSubpassContents
Definition: vulkan_core.h:2138
uint64_t VkPrivateDataSlot
Definition: vulkan_core.h:6482
uint64_t VkDeviceAddress
Definition: vulkan_core.h:95
VkFlags VkDescriptorPoolResetFlags
Definition: vulkan_core.h:2686
uint64_t VkPerformanceConfigurationINTEL
VkPolygonMode
Definition: vulkan_core.h:1994
VkFlags64 VkPipelineStageFlags2
Definition: vulkan_core.h:6512
uint32_t VkBool32
Definition: vulkan_core.h:94
uint64_t VkImageView
Definition: vulkan_core.h:112
VkPipelineStageFlagBits
Definition: vulkan_core.h:2369
VkDiscardRectangleModeEXT
VkObjectType
Definition: vulkan_core.h:1323
VkProvokingVertexModeEXT
uint64_t VkDeviceMemory
Definition: vulkan_core.h:108
uint64_t VkValidationCacheEXT
VkFlags VkCommandBufferResetFlags
Definition: vulkan_core.h:2780
uint64_t VkVideoSessionKHR
Definition: vulkan_core.h:7945
VkFlags VkColorComponentFlags
Definition: vulkan_core.h:2540
VkDebugUtilsMessageSeverityFlagBitsEXT
#define VULKAN_HPP_ASSERT
#define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
#define VULKAN_HPP_NODISCARD
#define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
#define VULKAN_HPP_NAMESPACE_STRING
#define VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_INLINE