HDK
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
vulkan_funcs.hpp
Go to the documentation of this file.
1 // Copyright 2015-2022 The Khronos Group Inc.
2 //
3 // SPDX-License-Identifier: Apache-2.0 OR MIT
4 //
5 
6 // This header is generated from the Khronos Vulkan XML API Registry.
7 
8 #ifndef VULKAN_FUNCS_HPP
9 #define VULKAN_FUNCS_HPP
10 
11 namespace VULKAN_HPP_NAMESPACE
12 {
13  //===========================
14  //=== COMMAND Definitions ===
15  //===========================
16 
17  //=== VK_VERSION_1_0 ===
18 
19  template <typename Dispatch>
23  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
24  {
25  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
26  return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
27  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
28  reinterpret_cast<VkInstance *>( pInstance ) ) );
29  }
30 
31 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
32  template <typename Dispatch>
35  {
36  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
37 
40  d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
41  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
42  reinterpret_cast<VkInstance *>( &instance ) );
43  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
44 
45  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
46  }
47 
48 # ifndef VULKAN_HPP_NO_SMART_HANDLE
49  template <typename Dispatch>
52  {
53  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
54 
57  d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
58  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
59  reinterpret_cast<VkInstance *>( &instance ) );
60  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
61 
62  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
64  }
65 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
66 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
67 
68  template <typename Dispatch>
70  {
71  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
72  d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
73  }
74 
75 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
76  template <typename Dispatch>
78  {
79  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
80 
81  d.vkDestroyInstance( m_instance,
82  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
83  }
84 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
85 
86  template <typename Dispatch>
88  VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
89  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
90  {
91  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
92  return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
93  }
94 
95 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
96  template <typename PhysicalDeviceAllocator, typename Dispatch>
98  Instance::enumeratePhysicalDevices( Dispatch const & d ) const
99  {
100  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
101 
102  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
103  uint32_t physicalDeviceCount;
105  do
106  {
107  result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
108  if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
109  {
110  physicalDevices.resize( physicalDeviceCount );
111  result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
112  }
113  } while ( result == VK_INCOMPLETE );
114  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
115  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
116  if ( physicalDeviceCount < physicalDevices.size() )
117  {
118  physicalDevices.resize( physicalDeviceCount );
119  }
120  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
121  }
122 
123  template <typename PhysicalDeviceAllocator,
124  typename Dispatch,
125  typename B1,
128  Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
129  {
130  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
131 
132  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
133  uint32_t physicalDeviceCount;
135  do
136  {
137  result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
138  if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
139  {
140  physicalDevices.resize( physicalDeviceCount );
141  result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
142  }
143  } while ( result == VK_INCOMPLETE );
144  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
145  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
146  if ( physicalDeviceCount < physicalDevices.size() )
147  {
148  physicalDevices.resize( physicalDeviceCount );
149  }
150  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
151  }
152 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
153 
154  template <typename Dispatch>
156  {
157  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
158  d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
159  }
160 
161 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
162  template <typename Dispatch>
165  {
166  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
167 
169  d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
170 
171  return features;
172  }
173 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
174 
175  template <typename Dispatch>
177  VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
178  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
179  {
180  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
181  d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
182  }
183 
184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
185  template <typename Dispatch>
188  {
189  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
190 
192  d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
193 
194  return formatProperties;
195  }
196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
197 
198  template <typename Dispatch>
204  VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
205  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
206  {
207  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
208  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
209  static_cast<VkFormat>( format ),
210  static_cast<VkImageType>( type ),
211  static_cast<VkImageTiling>( tiling ),
212  static_cast<VkImageUsageFlags>( usage ),
213  static_cast<VkImageCreateFlags>( flags ),
214  reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
215  }
216 
217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
218  template <typename Dispatch>
225  Dispatch const & d ) const
226  {
227  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
228 
229  VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
230  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
231  static_cast<VkFormat>( format ),
232  static_cast<VkImageType>( type ),
233  static_cast<VkImageTiling>( tiling ),
234  static_cast<VkImageUsageFlags>( usage ),
235  static_cast<VkImageCreateFlags>( flags ),
236  reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
237  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
238 
239  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
240  }
241 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
242 
243  template <typename Dispatch>
245  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
246  {
247  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
248  d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
249  }
250 
251 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
252  template <typename Dispatch>
255  {
256  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
257 
259  d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
260 
261  return properties;
262  }
263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
264 
265  template <typename Dispatch>
266  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
267  VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
268  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
269  {
270  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
271  d.vkGetPhysicalDeviceQueueFamilyProperties(
272  m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
273  }
274 
275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
276  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
277  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
278  PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
279  {
280  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
281 
282  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
283  uint32_t queueFamilyPropertyCount;
284  d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
285  queueFamilyProperties.resize( queueFamilyPropertyCount );
286  d.vkGetPhysicalDeviceQueueFamilyProperties(
287  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
288 
289  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
290  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
291  {
292  queueFamilyProperties.resize( queueFamilyPropertyCount );
293  }
294  return queueFamilyProperties;
295  }
296 
297  template <typename QueueFamilyPropertiesAllocator,
298  typename Dispatch,
299  typename B1,
301  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
302  PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
303  {
304  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
305 
306  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
307  uint32_t queueFamilyPropertyCount;
308  d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
309  queueFamilyProperties.resize( queueFamilyPropertyCount );
310  d.vkGetPhysicalDeviceQueueFamilyProperties(
311  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
312 
313  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
314  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
315  {
316  queueFamilyProperties.resize( queueFamilyPropertyCount );
317  }
318  return queueFamilyProperties;
319  }
320 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
321 
322  template <typename Dispatch>
324  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
325  {
326  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
327  d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
328  }
329 
330 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
331  template <typename Dispatch>
334  {
335  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
336 
338  d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
339 
340  return memoryProperties;
341  }
342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
343 
344  template <typename Dispatch>
345  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
346  {
347  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
348  return d.vkGetInstanceProcAddr( m_instance, pName );
349  }
350 
351 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
352  template <typename Dispatch>
354  {
355  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
356 
357  PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
358 
359  return result;
360  }
361 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
362 
363  template <typename Dispatch>
364  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
365  {
366  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
367  return d.vkGetDeviceProcAddr( m_device, pName );
368  }
369 
370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
371  template <typename Dispatch>
373  {
374  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
375 
376  PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
377 
378  return result;
379  }
380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
381 
382  template <typename Dispatch>
386  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
387  {
388  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
389  return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
390  reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
391  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
392  reinterpret_cast<VkDevice *>( pDevice ) ) );
393  }
394 
395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
396  template <typename Dispatch>
398  const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
399  {
400  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
401 
403  VkResult result =
404  d.vkCreateDevice( m_physicalDevice,
405  reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
406  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
407  reinterpret_cast<VkDevice *>( &device ) );
408  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
409 
410  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
411  }
412 
413 # ifndef VULKAN_HPP_NO_SMART_HANDLE
414  template <typename Dispatch>
418  Dispatch const & d ) const
419  {
420  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
421 
423  VkResult result =
424  d.vkCreateDevice( m_physicalDevice,
425  reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
426  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
427  reinterpret_cast<VkDevice *>( &device ) );
428  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
429 
430  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
432  }
433 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
434 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
435 
436  template <typename Dispatch>
438  {
439  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
440  d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
441  }
442 
443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
444  template <typename Dispatch>
446  {
447  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
448 
449  d.vkDestroyDevice( m_device,
450  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
451  }
452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
453 
454  template <typename Dispatch>
456  uint32_t * pPropertyCount,
458  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
459  {
460  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
461  return static_cast<Result>(
462  d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
463  }
464 
465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
466  template <typename ExtensionPropertiesAllocator, typename Dispatch>
467  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
469  {
470  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
471 
472  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
473  uint32_t propertyCount;
475  do
476  {
477  result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
478  if ( ( result == VK_SUCCESS ) && propertyCount )
479  {
480  properties.resize( propertyCount );
481  result = d.vkEnumerateInstanceExtensionProperties(
482  layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
483  }
484  } while ( result == VK_INCOMPLETE );
485  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
486  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
487  if ( propertyCount < properties.size() )
488  {
489  properties.resize( propertyCount );
490  }
491  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
492  }
493 
494  template <typename ExtensionPropertiesAllocator,
495  typename Dispatch,
496  typename B1,
498  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
500  ExtensionPropertiesAllocator & extensionPropertiesAllocator,
501  Dispatch const & d )
502  {
503  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
504 
505  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
506  uint32_t propertyCount;
508  do
509  {
510  result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
511  if ( ( result == VK_SUCCESS ) && propertyCount )
512  {
513  properties.resize( propertyCount );
514  result = d.vkEnumerateInstanceExtensionProperties(
515  layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
516  }
517  } while ( result == VK_INCOMPLETE );
518  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
519  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
520  if ( propertyCount < properties.size() )
521  {
522  properties.resize( propertyCount );
523  }
524  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
525  }
526 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
527 
528  template <typename Dispatch>
530  uint32_t * pPropertyCount,
532  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
533  {
534  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
535  return static_cast<Result>(
536  d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
537  }
538 
539 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
540  template <typename ExtensionPropertiesAllocator, typename Dispatch>
543  {
544  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
545 
546  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
547  uint32_t propertyCount;
549  do
550  {
551  result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
552  if ( ( result == VK_SUCCESS ) && propertyCount )
553  {
554  properties.resize( propertyCount );
555  result = d.vkEnumerateDeviceExtensionProperties(
556  m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
557  }
558  } while ( result == VK_INCOMPLETE );
559  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
560  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
561  if ( propertyCount < properties.size() )
562  {
563  properties.resize( propertyCount );
564  }
565  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
566  }
567 
568  template <typename ExtensionPropertiesAllocator,
569  typename Dispatch,
570  typename B1,
574  ExtensionPropertiesAllocator & extensionPropertiesAllocator,
575  Dispatch const & d ) const
576  {
577  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
578 
579  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
580  uint32_t propertyCount;
582  do
583  {
584  result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
585  if ( ( result == VK_SUCCESS ) && propertyCount )
586  {
587  properties.resize( propertyCount );
588  result = d.vkEnumerateDeviceExtensionProperties(
589  m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
590  }
591  } while ( result == VK_INCOMPLETE );
592  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
593  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
594  if ( propertyCount < properties.size() )
595  {
596  properties.resize( propertyCount );
597  }
598  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
599  }
600 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
601 
602  template <typename Dispatch>
605  Dispatch const & d ) VULKAN_HPP_NOEXCEPT
606  {
607  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
608  return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
609  }
610 
611 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
612  template <typename LayerPropertiesAllocator, typename Dispatch>
613  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
614  enumerateInstanceLayerProperties( Dispatch const & d )
615  {
616  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
617 
618  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
619  uint32_t propertyCount;
621  do
622  {
623  result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
624  if ( ( result == VK_SUCCESS ) && propertyCount )
625  {
626  properties.resize( propertyCount );
627  result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
628  }
629  } while ( result == VK_INCOMPLETE );
630  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
631  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
632  if ( propertyCount < properties.size() )
633  {
634  properties.resize( propertyCount );
635  }
636  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
637  }
638 
639  template <typename LayerPropertiesAllocator,
640  typename Dispatch,
641  typename B1,
643  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
644  enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
645  {
646  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
647 
648  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
649  uint32_t propertyCount;
651  do
652  {
653  result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
654  if ( ( result == VK_SUCCESS ) && propertyCount )
655  {
656  properties.resize( propertyCount );
657  result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
658  }
659  } while ( result == VK_INCOMPLETE );
660  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
661  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
662  if ( propertyCount < properties.size() )
663  {
664  properties.resize( propertyCount );
665  }
666  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
667  }
668 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
669 
670  template <typename Dispatch>
673  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
674  {
675  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
676  return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
677  }
678 
679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
680  template <typename LayerPropertiesAllocator, typename Dispatch>
683  {
684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
685 
686  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
687  uint32_t propertyCount;
689  do
690  {
691  result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
692  if ( ( result == VK_SUCCESS ) && propertyCount )
693  {
694  properties.resize( propertyCount );
695  result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
696  }
697  } while ( result == VK_INCOMPLETE );
698  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
699  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
700  if ( propertyCount < properties.size() )
701  {
702  properties.resize( propertyCount );
703  }
704  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
705  }
706 
707  template <typename LayerPropertiesAllocator,
708  typename Dispatch,
709  typename B1,
712  PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
713  {
714  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
715 
716  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
717  uint32_t propertyCount;
719  do
720  {
721  result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
722  if ( ( result == VK_SUCCESS ) && propertyCount )
723  {
724  properties.resize( propertyCount );
725  result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
726  }
727  } while ( result == VK_INCOMPLETE );
728  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
729  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
730  if ( propertyCount < properties.size() )
731  {
732  properties.resize( propertyCount );
733  }
734  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
735  }
736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
737 
738  template <typename Dispatch>
739  VULKAN_HPP_INLINE void
740  Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
741  {
742  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
743  d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
744  }
745 
746 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
747  template <typename Dispatch>
749  Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
750  {
751  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
752 
754  d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
755 
756  return queue;
757  }
758 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
759 
760  template <typename Dispatch>
762  const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
764  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
765  {
766  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
767  return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
768  }
769 
770 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
771  template <typename Dispatch>
774  {
775  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
776 
777  VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
778  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
779 
780  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
781  }
782 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
783 
 784 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced mode disabled: return the raw Result from vkQueueWaitIdle.
 785  template <typename Dispatch>
 787  {
 788  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 789  return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
 790  }
 791 #else
// Enhanced mode: check the result (raising on failure) before returning.
// NOTE(review): listing lines 786/793 (the two definitions' declaration lines)
// are missing from this extraction.
 792  template <typename Dispatch>
 794  {
 795  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 796 
 797  VkResult result = d.vkQueueWaitIdle( m_queue );
 798  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
 799 
 800  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 801  }
 802 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
803 
 804 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced mode disabled: return the raw Result from vkDeviceWaitIdle.
 805  template <typename Dispatch>
 807  {
 808  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 809  return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
 810  }
 811 #else
// Enhanced mode: route the VkResult through resultCheck before returning.
// NOTE(review): listing lines 806/813 (declaration lines) are absent from
// this extraction.
 812  template <typename Dispatch>
 814  {
 815  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 816 
 817  VkResult result = d.vkDeviceWaitIdle( m_device );
 818  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
 819 
 820  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 821  }
 822 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
823 
// C-style overload: forwards to vkAllocateMemory, writing the new handle to
// *pMemory and returning the raw Result.
// NOTE(review): several declaration lines (825-827, 839-841, 846, 859-861,
// 866, 875) are missing from this Doxygen extraction — the full signatures
// and the local `memory` declarations must be read from the generated
// vulkan_funcs.hpp.
 824  template <typename Dispatch>
 828  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 829  {
 830  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 831  return static_cast<Result>( d.vkAllocateMemory( m_device,
 832  reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
 833  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 834  reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
 835  }
 836 
 837 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes references / an Optional allocator, checks
// the result (raising on failure) and returns the DeviceMemory by value.
 838  template <typename Dispatch>
 842  Dispatch const & d ) const
 843  {
 844  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 845 
 847  VkResult result =
 848  d.vkAllocateMemory( m_device,
 849  reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
 850  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 851  reinterpret_cast<VkDeviceMemory *>( &memory ) );
 852  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
 853 
 854  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
 855  }
 856 
 857 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: same call, but the returned handle is wrapped so it
// is freed automatically when the unique handle goes out of scope.
 858  template <typename Dispatch>
 862  Dispatch const & d ) const
 863  {
 864  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 865 
 867  VkResult result =
 868  d.vkAllocateMemory( m_device,
 869  reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
 870  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 871  reinterpret_cast<VkDeviceMemory *>( &memory ) );
 872  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
 873 
 874  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 876  }
 877 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 878 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
879 
// C-style overload: forwards to vkFreeMemory with an optional
// pAllocator out-of-line; returns nothing.
// NOTE(review): declaration lines 881-882, 891-892, 904-905 and 914 are
// missing from this extraction; the `destroy`-named pair below is the
// generated alias overload for the same vkFreeMemory entry point.
 880  template <typename Dispatch>
 883  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 884  {
 885  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 886  d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 887  }
 888 
 889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: Optional<AllocationCallbacks> instead of a raw
// pointer; still noexcept since vkFreeMemory returns nothing.
 890  template <typename Dispatch>
 893  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 894  {
 895  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 896 
 897  d.vkFreeMemory( m_device,
 898  static_cast<VkDeviceMemory>( memory ),
 899  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 900  }
 901 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 902 
// Alias overload (generated as Device::( free )) — identical body to
// freeMemory above, forwarding to the same vkFreeMemory call.
 903  template <typename Dispatch>
 906  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 907  {
 908  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 909  d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 910  }
 911 
 912 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode alias overload with Optional allocator.
 913  template <typename Dispatch>
 915  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 916  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 917  {
 918  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 919 
 920  d.vkFreeMemory( m_device,
 921  static_cast<VkDeviceMemory>( memory ),
 922  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 923  }
 924 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
925 
// C-style overload: forwards to vkMapMemory; the mapped host address is
// written to *ppData and the raw Result is returned.
// NOTE(review): listing lines 927-930 and 945-948 (the definitions'
// declaration/parameter lines: memory, offset, size, flags) are missing from
// this extraction.
 926  template <typename Dispatch>
 931  void ** ppData,
 932  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 933  {
 934  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 935  return static_cast<Result>( d.vkMapMemory( m_device,
 936  static_cast<VkDeviceMemory>( memory ),
 937  static_cast<VkDeviceSize>( offset ),
 938  static_cast<VkDeviceSize>( size ),
 939  static_cast<VkMemoryMapFlags>( flags ),
 940  ppData ) );
 941  }
 942 
 943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the mapped void* directly after routing
// the VkResult through resultCheck (raises on failure).
 944  template <typename Dispatch>
 949  Dispatch const & d ) const
 950  {
 951  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 952 
 953  void * pData;
 954  VkResult result = d.vkMapMemory( m_device,
 955  static_cast<VkDeviceMemory>( memory ),
 956  static_cast<VkDeviceSize>( offset ),
 957  static_cast<VkDeviceSize>( size ),
 958  static_cast<VkMemoryMapFlags>( flags ),
 959  &pData );
 960  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
 961 
 962  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
 963  }
 964 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
965 
// Unmaps a previously mapped DeviceMemory object; thin pass-through to
// vkUnmapMemory (no result to check).
// NOTE(review): listing line 967 (the definition's declaration line) is
// missing from this extraction.
 966  template <typename Dispatch>
 968  {
 969  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 970  d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
 971  }
972 
// C-style overload: flushes host writes in the given mapped ranges to the
// device; raw Result returned.
// NOTE(review): listing lines 974 and 984-985 (declaration lines) are missing
// from this extraction.
 973  template <typename Dispatch>
 975  const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
 976  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 977  {
 978  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 979  return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
 980  }
 981 
 982 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: ArrayProxy supplies count + data; failure raises
// via resultCheck.
 983  template <typename Dispatch>
 986  Dispatch const & d ) const
 987  {
 988  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 989 
 990  VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
 991  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
 992 
 993  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 994  }
 995 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
996 
// C-style overload: invalidates host caches for the given mapped ranges so
// device writes become visible to the host; raw Result returned.
// NOTE(review): listing lines 998 and 1009-1010 (declaration lines) are
// missing from this extraction.
 997  template <typename Dispatch>
 999  const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
 1000  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1001  {
 1002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1003  return static_cast<Result>(
 1004  d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
 1005  }
 1006 
 1007 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: ArrayProxy supplies count + data; failure raises
// via resultCheck.
 1008  template <typename Dispatch>
 1011  Dispatch const & d ) const
 1012  {
 1013  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1014 
 1015  VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
 1016  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
 1017 
 1018  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 1019  }
 1020 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1021 
// C-style overload: queries the current commitment (in bytes) for a
// lazily-allocated DeviceMemory; writes through pCommittedMemoryInBytes.
// NOTE(review): listing lines 1023 and 1033 (declaration lines) are missing
// from this extraction.
 1022  template <typename Dispatch>
 1024  VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
 1025  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1026  {
 1027  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1028  d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
 1029  }
 1030 
 1031 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the committed byte count by value.
 1032  template <typename Dispatch>
 1034  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1035  {
 1036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1037 
 1038  VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
 1039  d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
 1040 
 1041  return committedMemoryInBytes;
 1042  }
 1043 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1044 
 1045 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced mode disabled: bind buffer to memory at memoryOffset, returning
// the raw Result.
// NOTE(review): listing lines 1047-1048 and 1058-1059 (declaration lines)
// are missing from this extraction.
 1046  template <typename Dispatch>
 1049  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
 1050  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1051  {
 1052  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1053  return static_cast<Result>(
 1054  d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
 1055  }
 1056 #else
// Enhanced mode: same call, with the result routed through resultCheck.
 1057  template <typename Dispatch>
 1060  {
 1061  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1062 
 1063  VkResult result =
 1064  d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
 1065  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
 1066 
 1067  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 1068  }
 1069 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1070 
 1071 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced mode disabled: bind image to memory at memoryOffset, returning
// the raw Result.
// NOTE(review): listing lines 1073-1074 and 1084-1085 (declaration lines)
// are missing from this extraction.
 1072  template <typename Dispatch>
 1075  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
 1076  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1077  {
 1078  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1079  return static_cast<Result>(
 1080  d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
 1081  }
 1082 #else
// Enhanced mode: same call, with the result routed through resultCheck.
 1083  template <typename Dispatch>
 1086  {
 1087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1088 
 1089  VkResult result =
 1090  d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
 1091  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
 1092 
 1093  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 1094  }
 1095 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1096 
// C-style overload: writes the buffer's MemoryRequirements through the
// out-pointer; no result to check.
// NOTE(review): listing lines 1098 and 1108-1109 (declaration lines) are
// missing from this extraction.
 1097  template <typename Dispatch>
 1099  VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
 1100  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1101  {
 1102  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1103  d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
 1104  }
 1105 
 1106 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the MemoryRequirements struct by value.
 1107  template <typename Dispatch>
 1110  {
 1111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1112 
 1113  VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
 1114  d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
 1115 
 1116  return memoryRequirements;
 1117  }
 1118 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1119 
// C-style overload: writes the image's MemoryRequirements through the
// out-pointer; no result to check.
// NOTE(review): listing lines 1121 and 1131-1132 (declaration lines) are
// missing from this extraction.
 1120  template <typename Dispatch>
 1122  VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
 1123  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1124  {
 1125  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1126  d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
 1127  }
 1128 
 1129 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the MemoryRequirements struct by value.
 1130  template <typename Dispatch>
 1133  {
 1134  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1135 
 1136  VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
 1137  d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
 1138 
 1139  return memoryRequirements;
 1140  }
 1141 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1142 
// C-style overload: classic two-call enumeration — pass null to query the
// count, then a buffer to fetch; both steps are the caller's responsibility.
// NOTE(review): listing lines 1144, 1159, 1183 and 1185 (declaration and
// allocator-constraint lines) are missing from this extraction.
 1143  template <typename Dispatch>
 1145  uint32_t * pSparseMemoryRequirementCount,
 1146  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
 1147  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1148  {
 1149  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1150  d.vkGetImageSparseMemoryRequirements( m_device,
 1151  static_cast<VkImage>( image ),
 1152  pSparseMemoryRequirementCount,
 1153  reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
 1154  }
 1155 
 1156 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: performs the count/resize/fetch sequence itself and
// returns a std::vector; shrinks if the second call wrote fewer entries.
 1157  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
 1158  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
 1160  {
 1161  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1162 
 1163  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
 1164  uint32_t sparseMemoryRequirementCount;
// First call: count only (null data pointer).
 1165  d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
 1166  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
// Second call: fill the sized buffer.
 1167  d.vkGetImageSparseMemoryRequirements( m_device,
 1168  static_cast<VkImage>( image ),
 1169  &sparseMemoryRequirementCount,
 1170  reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
 1171 
 1172  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
 1173  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
 1174  {
 1175  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
 1176  }
 1177  return sparseMemoryRequirements;
 1178  }
 1179 
// Variant taking a caller-supplied allocator for the returned vector;
// otherwise identical to the overload above.
 1180  template <typename SparseImageMemoryRequirementsAllocator,
 1181  typename Dispatch,
 1182  typename B1,
 1184  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
 1186  SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
 1187  Dispatch const & d ) const
 1188  {
 1189  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1190 
 1191  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
 1192  sparseImageMemoryRequirementsAllocator );
 1193  uint32_t sparseMemoryRequirementCount;
 1194  d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
 1195  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
 1196  d.vkGetImageSparseMemoryRequirements( m_device,
 1197  static_cast<VkImage>( image ),
 1198  &sparseMemoryRequirementCount,
 1199  reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
 1200 
 1201  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
 1202  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
 1203  {
 1204  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
 1205  }
 1206  return sparseMemoryRequirements;
 1207  }
 1208 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1209 
// C-style overload on PhysicalDevice: two-call enumeration of sparse image
// format properties for the given format/type/samples/usage/tiling combination.
// NOTE(review): listing lines 1211-1215, 1217, 1234-1238, 1274, 1276-1280
// (declaration/parameter and allocator-constraint lines) are missing from
// this extraction.
 1210  template <typename Dispatch>
 1216  uint32_t * pPropertyCount,
 1218  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1219  {
 1220  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1221  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
 1222  static_cast<VkFormat>( format ),
 1223  static_cast<VkImageType>( type ),
 1224  static_cast<VkSampleCountFlagBits>( samples ),
 1225  static_cast<VkImageUsageFlags>( usage ),
 1226  static_cast<VkImageTiling>( tiling ),
 1227  pPropertyCount,
 1228  reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
 1229  }
 1230 
 1231 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: count/resize/fetch handled internally, result
// returned as a std::vector (shrunk if fewer entries were written).
 1232  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
 1233  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
 1239  Dispatch const & d ) const
 1240  {
 1241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1242 
 1243  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
 1244  uint32_t propertyCount;
// First call: count only.
 1245  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
 1246  static_cast<VkFormat>( format ),
 1247  static_cast<VkImageType>( type ),
 1248  static_cast<VkSampleCountFlagBits>( samples ),
 1249  static_cast<VkImageUsageFlags>( usage ),
 1250  static_cast<VkImageTiling>( tiling ),
 1251  &propertyCount,
 1252  nullptr );
 1253  properties.resize( propertyCount );
// Second call: fill the sized buffer.
 1254  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
 1255  static_cast<VkFormat>( format ),
 1256  static_cast<VkImageType>( type ),
 1257  static_cast<VkSampleCountFlagBits>( samples ),
 1258  static_cast<VkImageUsageFlags>( usage ),
 1259  static_cast<VkImageTiling>( tiling ),
 1260  &propertyCount,
 1261  reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
 1262 
 1263  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
 1264  if ( propertyCount < properties.size() )
 1265  {
 1266  properties.resize( propertyCount );
 1267  }
 1268  return properties;
 1269  }
 1270 
// Variant taking a caller-supplied allocator for the returned vector;
// otherwise identical to the overload above.
 1271  template <typename SparseImageFormatPropertiesAllocator,
 1272  typename Dispatch,
 1273  typename B1,
 1275  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
 1281  SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
 1282  Dispatch const & d ) const
 1283  {
 1284  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1285 
 1286  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
 1287  uint32_t propertyCount;
 1288  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
 1289  static_cast<VkFormat>( format ),
 1290  static_cast<VkImageType>( type ),
 1291  static_cast<VkSampleCountFlagBits>( samples ),
 1292  static_cast<VkImageUsageFlags>( usage ),
 1293  static_cast<VkImageTiling>( tiling ),
 1294  &propertyCount,
 1295  nullptr );
 1296  properties.resize( propertyCount );
 1297  d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
 1298  static_cast<VkFormat>( format ),
 1299  static_cast<VkImageType>( type ),
 1300  static_cast<VkSampleCountFlagBits>( samples ),
 1301  static_cast<VkImageUsageFlags>( usage ),
 1302  static_cast<VkImageTiling>( tiling ),
 1303  &propertyCount,
 1304  reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
 1305 
 1306  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
 1307  if ( propertyCount < properties.size() )
 1308  {
 1309  properties.resize( propertyCount );
 1310  }
 1311  return properties;
 1312  }
 1313 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1314 
// C-style overload: submits sparse binding operations to the queue with an
// optional fence; raw Result returned.
// NOTE(review): listing lines 1316, 1318 and 1328-1329 (declaration lines)
// are missing from this extraction.
 1315  template <typename Dispatch>
 1317  const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
 1319  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1320  {
 1321  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1322  return static_cast<Result>(
 1323  d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
 1324  }
 1325 
 1326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: ArrayProxy supplies count + data; failure raises
// via resultCheck.
 1327  template <typename Dispatch>
 1330  {
 1331  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1332 
 1333  VkResult result =
 1334  d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
 1335  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
 1336 
 1337  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 1338  }
 1339 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1340 
// C-style overload: creates a fence, writing the handle to *pFence and
// returning the raw Result.
// NOTE(review): listing lines 1342, 1356, 1361, 1374, 1379 and 1388
// (declaration lines and local `fence` declarations; 1388 is the UniqueHandle
// construction argument) are missing from this extraction.
 1341  template <typename Dispatch>
 1343  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1344  VULKAN_HPP_NAMESPACE::Fence * pFence,
 1345  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1346  {
 1347  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1348  return static_cast<Result>( d.vkCreateFence( m_device,
 1349  reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
 1350  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 1351  reinterpret_cast<VkFence *>( pFence ) ) );
 1352  }
 1353 
 1354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the created Fence by value after checking
// the result (raises on failure).
 1355  template <typename Dispatch>
 1357  const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
 1358  {
 1359  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1360 
 1362  VkResult result =
 1363  d.vkCreateFence( m_device,
 1364  reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
 1365  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 1366  reinterpret_cast<VkFence *>( &fence ) );
 1367  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
 1368 
 1369  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
 1370  }
 1371 
 1372 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: the returned fence is wrapped so it is destroyed
// automatically when the unique handle goes out of scope.
 1373  template <typename Dispatch>
 1375  const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
 1376  {
 1377  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1378 
 1380  VkResult result =
 1381  d.vkCreateFence( m_device,
 1382  reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
 1383  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 1384  reinterpret_cast<VkFence *>( &fence ) );
 1385  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
 1386 
 1387  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 1389  }
 1390 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 1391 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1392 
// C-style overload: destroys the fence via vkDestroyFence with an optional
// raw allocator pointer; returns nothing.
// NOTE(review): listing lines 1394, 1404-1405, 1417 and 1427-1428
// (declaration lines) are missing from this extraction; the second pair below
// is the generated `destroy` alias overload for the same entry point.
 1393  template <typename Dispatch>
 1395  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1396  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1397  {
 1398  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1399  d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 1400  }
 1401 
 1402 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with Optional<AllocationCallbacks>.
 1403  template <typename Dispatch>
 1406  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1407  {
 1408  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1409 
 1410  d.vkDestroyFence( m_device,
 1411  static_cast<VkFence>( fence ),
 1412  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 1413  }
 1414 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 1415 
// Alias overload (Device::destroy taking a Fence) — identical body to
// destroyFence above.
 1416  template <typename Dispatch>
 1418  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1419  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1420  {
 1421  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1422  d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 1423  }
 1424 
 1425 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode alias overload with Optional allocator.
 1426  template <typename Dispatch>
 1429  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1430  {
 1431  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1432 
 1433  d.vkDestroyFence( m_device,
 1434  static_cast<VkFence>( fence ),
 1435  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 1436  }
 1437 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1438 
// C-style overload: resets an array of fences to the unsignaled state; raw
// Result returned.
// NOTE(review): listing lines 1440 and 1450-1451 (declaration lines) are
// missing from this extraction.
 1439  template <typename Dispatch>
 1441  const VULKAN_HPP_NAMESPACE::Fence * pFences,
 1442  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1443  {
 1444  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1445  return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
 1446  }
 1447 
 1448 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: ArrayProxy supplies count + data; failure raises
// via resultCheck.
 1449  template <typename Dispatch>
 1452  {
 1453  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1454 
 1455  VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
 1456  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
 1457 
 1458  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 1459  }
 1460 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1461 
 1462 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced mode disabled: return the raw Result of vkGetFenceStatus.
 1463  template <typename Dispatch>
 1465  {
 1466  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1467  return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
 1468  }
 1469 #else
// Enhanced mode: the Result is returned (not swallowed) because both success
// codes are meaningful to the caller; resultCheck is given the extra
// success-code list on the (elided) line 1478.
// NOTE(review): listing lines 1464, 1471 and 1478 are missing from this
// extraction — 1478 should carry the allowed success codes
// (eSuccess/eNotReady per the generated file); confirm against
// vulkan_funcs.hpp.
 1470  template <typename Dispatch>
 1472  {
 1473  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1474 
 1475  VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
 1476  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 1477  VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
 1479 
 1480  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
 1481  }
 1482 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1483 
// C-style overload: waits (up to `timeout` nanoseconds per the Vulkan API)
// for all/any of the fences; raw Result returned (eSuccess or eTimeout are
// both expected outcomes).
// NOTE(review): listing lines 1485, 1487, 1498-1500 and 1510 (declaration
// lines and the resultCheck success-code list) are missing from this
// extraction.
 1484  template <typename Dispatch>
 1486  const VULKAN_HPP_NAMESPACE::Fence * pFences,
 1488  uint64_t timeout,
 1489  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1490  {
 1491  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1492  return static_cast<Result>(
 1493  d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
 1494  }
 1495 
 1496 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the Result so callers can distinguish
// success from timeout; resultCheck receives the allowed success codes on
// the elided line 1510.
 1497  template <typename Dispatch>
 1501  uint64_t timeout,
 1502  Dispatch const & d ) const
 1503  {
 1504  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1505 
 1506  VkResult result =
 1507  d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
 1508  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 1509  VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
 1511 
 1512  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
 1513  }
 1514 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1515 
// C-style overload: creates a semaphore, writing the handle to *pSemaphore
// and returning the raw Result.
// NOTE(review): listing lines 1517, 1531-1533, 1538, 1551-1553, 1558 and 1568
// (declaration lines, local `semaphore` declarations, and the UniqueHandle
// construction argument) are missing from this extraction.
 1516  template <typename Dispatch>
 1518  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1519  VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
 1520  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1521  {
 1522  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1523  return static_cast<Result>( d.vkCreateSemaphore( m_device,
 1524  reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
 1525  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 1526  reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
 1527  }
 1528 
 1529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the created Semaphore by value after
// checking the result (raises on failure).
 1530  template <typename Dispatch>
 1534  Dispatch const & d ) const
 1535  {
 1536  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1537 
 1539  VkResult result =
 1540  d.vkCreateSemaphore( m_device,
 1541  reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
 1542  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 1543  reinterpret_cast<VkSemaphore *>( &semaphore ) );
 1544  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
 1545 
 1546  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
 1547  }
 1548 
 1549 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: the returned semaphore is wrapped so it is destroyed
// automatically when the unique handle goes out of scope.
 1550  template <typename Dispatch>
 1554  Dispatch const & d ) const
 1555  {
 1556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1557 
 1559  VkResult result =
 1560  d.vkCreateSemaphore( m_device,
 1561  reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
 1562  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 1563  reinterpret_cast<VkSemaphore *>( &semaphore ) );
 1564  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
 1565 
 1566  return createResultValueType(
 1567  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 1569  }
 1570 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 1571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1572 
// C-style overload: destroys the semaphore via vkDestroySemaphore with an
// optional raw allocator pointer; returns nothing.
// NOTE(review): listing lines 1574, 1584-1585, 1597 and 1607-1608
// (declaration lines) are missing from this extraction; the second pair below
// is the generated `destroy` alias overload for the same entry point.
 1573  template <typename Dispatch>
 1575  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1576  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1577  {
 1578  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1579  d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 1580  }
 1581 
 1582 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with Optional<AllocationCallbacks>.
 1583  template <typename Dispatch>
 1586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1587  {
 1588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1589 
 1590  d.vkDestroySemaphore( m_device,
 1591  static_cast<VkSemaphore>( semaphore ),
 1592  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 1593  }
 1594 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 1595 
// Alias overload (Device::destroy taking a Semaphore) — identical body to
// destroySemaphore above.
 1596  template <typename Dispatch>
 1598  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 1599  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1600  {
 1601  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1602  d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 1603  }
 1604 
 1605 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode alias overload with Optional allocator.
 1606  template <typename Dispatch>
 1609  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 1610  {
 1611  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 1612 
 1613  d.vkDestroySemaphore( m_device,
 1614  static_cast<VkSemaphore>( semaphore ),
 1615  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 1616  }
 1617 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1618 
1619  template <typename Dispatch>
1621  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1622  VULKAN_HPP_NAMESPACE::Event * pEvent,
1623  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1624  {
1625  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1626  return static_cast<Result>( d.vkCreateEvent( m_device,
1627  reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
1628  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1629  reinterpret_cast<VkEvent *>( pEvent ) ) );
1630  }
1631 
1632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1633  template <typename Dispatch>
1635  const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1636  {
1637  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1638 
1640  VkResult result =
1641  d.vkCreateEvent( m_device,
1642  reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1643  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1644  reinterpret_cast<VkEvent *>( &event ) );
1645  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
1646 
1647  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
1648  }
1649 
1650 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1651  template <typename Dispatch>
1653  const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1654  {
1655  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1656 
1658  VkResult result =
1659  d.vkCreateEvent( m_device,
1660  reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1661  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1662  reinterpret_cast<VkEvent *>( &event ) );
1663  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
1664 
1665  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1667  }
1668 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
1669 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1670 
1671  template <typename Dispatch>
1673  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1674  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1675  {
1676  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1677  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1678  }
1679 
1680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1681  template <typename Dispatch>
1684  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1685  {
1686  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1687 
1688  d.vkDestroyEvent( m_device,
1689  static_cast<VkEvent>( event ),
1690  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1691  }
1692 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1693 
1694  template <typename Dispatch>
1696  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1697  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1698  {
1699  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1700  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1701  }
1702 
1703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1704  template <typename Dispatch>
1707  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1708  {
1709  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1710 
1711  d.vkDestroyEvent( m_device,
1712  static_cast<VkEvent>( event ),
1713  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1714  }
1715 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1716 
1717 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1718  template <typename Dispatch>
1720  {
1721  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1722  return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
1723  }
1724 #else
1725  template <typename Dispatch>
1727  {
1728  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1729 
1730  VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
1731  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1732  VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
1734 
1735  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
1736  }
1737 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1738 
1739 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1740  template <typename Dispatch>
1742  {
1743  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1744  return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
1745  }
1746 #else
1747  template <typename Dispatch>
1749  Dispatch const & d ) const
1750  {
1751  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1752 
1753  VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
1754  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
1755 
1756  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
1757  }
1758 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1759 
1760 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1761  template <typename Dispatch>
1763  {
1764  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1765  return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
1766  }
1767 #else
1768  template <typename Dispatch>
1770  {
1771  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1772 
1773  VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
1774  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
1775 
1776  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
1777  }
1778 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
1779 
1780  template <typename Dispatch>
1782  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1783  VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
1784  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1785  {
1786  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1787  return static_cast<Result>( d.vkCreateQueryPool( m_device,
1788  reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
1789  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1790  reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
1791  }
1792 
1793 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1794  template <typename Dispatch>
1798  Dispatch const & d ) const
1799  {
1800  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1801 
1803  VkResult result =
1804  d.vkCreateQueryPool( m_device,
1805  reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
1806  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1807  reinterpret_cast<VkQueryPool *>( &queryPool ) );
1808  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
1809 
1810  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
1811  }
1812 
1813 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1814  template <typename Dispatch>
1818  Dispatch const & d ) const
1819  {
1820  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1821 
1823  VkResult result =
1824  d.vkCreateQueryPool( m_device,
1825  reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
1826  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1827  reinterpret_cast<VkQueryPool *>( &queryPool ) );
1828  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
1829 
1830  return createResultValueType(
1831  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1833  }
1834 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
1835 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1836 
1837  template <typename Dispatch>
1839  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1840  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1841  {
1842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1843  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1844  }
1845 
1846 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1847  template <typename Dispatch>
1850  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1851  {
1852  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1853 
1854  d.vkDestroyQueryPool( m_device,
1855  static_cast<VkQueryPool>( queryPool ),
1856  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1857  }
1858 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1859 
1860  template <typename Dispatch>
1862  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1863  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1864  {
1865  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1866  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1867  }
1868 
1869 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1870  template <typename Dispatch>
1873  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1874  {
1875  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1876 
1877  d.vkDestroyQueryPool( m_device,
1878  static_cast<VkQueryPool>( queryPool ),
1879  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1880  }
1881 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1882 
1883  template <typename Dispatch>
1885  uint32_t firstQuery,
1886  uint32_t queryCount,
1887  size_t dataSize,
1888  void * pData,
1891  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1892  {
1893  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1894  return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
1895  static_cast<VkQueryPool>( queryPool ),
1896  firstQuery,
1897  queryCount,
1898  dataSize,
1899  pData,
1900  static_cast<VkDeviceSize>( stride ),
1901  static_cast<VkQueryResultFlags>( flags ) ) );
1902  }
1903 
1904 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1905  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
1908  uint32_t firstQuery,
1909  uint32_t queryCount,
1910  size_t dataSize,
1913  Dispatch const & d ) const
1914  {
1915  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1916 
1917  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
1918  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
1919  VkResult result = d.vkGetQueryPoolResults( m_device,
1920  static_cast<VkQueryPool>( queryPool ),
1921  firstQuery,
1922  queryCount,
1923  data.size() * sizeof( DataType ),
1924  reinterpret_cast<void *>( data.data() ),
1925  static_cast<VkDeviceSize>( stride ),
1926  static_cast<VkQueryResultFlags>( flags ) );
1927  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1928  VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
1930 
1932  }
1933 
1934  template <typename DataType, typename Dispatch>
1936  uint32_t firstQuery,
1937  uint32_t queryCount,
1940  Dispatch const & d ) const
1941  {
1942  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1943 
1944  DataType data;
1945  VkResult result = d.vkGetQueryPoolResults( m_device,
1946  static_cast<VkQueryPool>( queryPool ),
1947  firstQuery,
1948  queryCount,
1949  sizeof( DataType ),
1950  reinterpret_cast<void *>( &data ),
1951  static_cast<VkDeviceSize>( stride ),
1952  static_cast<VkQueryResultFlags>( flags ) );
1953  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1954  VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
1956 
1957  return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
1958  }
1959 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1960 
1961  template <typename Dispatch>
1963  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
1964  VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
1965  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1966  {
1967  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1968  return static_cast<Result>( d.vkCreateBuffer( m_device,
1969  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
1970  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1971  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
1972  }
1973 
1974 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1975  template <typename Dispatch>
1977  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1978  {
1979  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1980 
1982  VkResult result =
1983  d.vkCreateBuffer( m_device,
1984  reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
1985  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1986  reinterpret_cast<VkBuffer *>( &buffer ) );
1987  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
1988 
1989  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
1990  }
1991 
1992 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1993  template <typename Dispatch>
1995  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1996  {
1997  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1998 
2000  VkResult result =
2001  d.vkCreateBuffer( m_device,
2002  reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
2003  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2004  reinterpret_cast<VkBuffer *>( &buffer ) );
2005  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
2006 
2007  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2009  }
2010 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2011 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2012 
2013  template <typename Dispatch>
2015  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2016  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2017  {
2018  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2019  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2020  }
2021 
2022 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2023  template <typename Dispatch>
2026  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2027  {
2028  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2029 
2030  d.vkDestroyBuffer( m_device,
2031  static_cast<VkBuffer>( buffer ),
2032  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2033  }
2034 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2035 
2036  template <typename Dispatch>
2038  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2039  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2040  {
2041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2042  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2043  }
2044 
2045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2046  template <typename Dispatch>
2049  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2050  {
2051  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2052 
2053  d.vkDestroyBuffer( m_device,
2054  static_cast<VkBuffer>( buffer ),
2055  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2056  }
2057 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2058 
2059  template <typename Dispatch>
2061  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2063  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2064  {
2065  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2066  return static_cast<Result>( d.vkCreateBufferView( m_device,
2067  reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
2068  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2069  reinterpret_cast<VkBufferView *>( pView ) ) );
2070  }
2071 
2072 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2073  template <typename Dispatch>
2077  Dispatch const & d ) const
2078  {
2079  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2080 
2082  VkResult result =
2083  d.vkCreateBufferView( m_device,
2084  reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2085  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2086  reinterpret_cast<VkBufferView *>( &view ) );
2087  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
2088 
2089  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
2090  }
2091 
2092 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2093  template <typename Dispatch>
2097  Dispatch const & d ) const
2098  {
2099  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2100 
2102  VkResult result =
2103  d.vkCreateBufferView( m_device,
2104  reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2105  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2106  reinterpret_cast<VkBufferView *>( &view ) );
2107  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
2108 
2109  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2111  }
2112 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2113 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2114 
2115  template <typename Dispatch>
2117  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2118  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2119  {
2120  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2121  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2122  }
2123 
2124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2125  template <typename Dispatch>
2128  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2129  {
2130  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2131 
2132  d.vkDestroyBufferView( m_device,
2133  static_cast<VkBufferView>( bufferView ),
2134  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2135  }
2136 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2137 
2138  template <typename Dispatch>
2140  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2141  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2142  {
2143  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2144  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2145  }
2146 
2147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2148  template <typename Dispatch>
2151  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2152  {
2153  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2154 
2155  d.vkDestroyBufferView( m_device,
2156  static_cast<VkBufferView>( bufferView ),
2157  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2158  }
2159 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2160 
2161  template <typename Dispatch>
2163  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2164  VULKAN_HPP_NAMESPACE::Image * pImage,
2165  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2166  {
2167  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2168  return static_cast<Result>( d.vkCreateImage( m_device,
2169  reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
2170  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2171  reinterpret_cast<VkImage *>( pImage ) ) );
2172  }
2173 
2174 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2175  template <typename Dispatch>
2177  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2178  {
2179  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2180 
2182  VkResult result =
2183  d.vkCreateImage( m_device,
2184  reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2185  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2186  reinterpret_cast<VkImage *>( &image ) );
2187  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
2188 
2189  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
2190  }
2191 
2192 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2193  template <typename Dispatch>
2195  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2196  {
2197  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2198 
2200  VkResult result =
2201  d.vkCreateImage( m_device,
2202  reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2203  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2204  reinterpret_cast<VkImage *>( &image ) );
2205  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
2206 
2207  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2209  }
2210 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2211 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2212 
2213  template <typename Dispatch>
2215  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2216  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2217  {
2218  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2219  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2220  }
2221 
2222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2223  template <typename Dispatch>
2226  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2227  {
2228  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2229 
2230  d.vkDestroyImage( m_device,
2231  static_cast<VkImage>( image ),
2232  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2233  }
2234 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2235 
2236  template <typename Dispatch>
2238  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2239  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2240  {
2241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2242  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2243  }
2244 
2245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2246  template <typename Dispatch>
2249  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2250  {
2251  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2252 
2253  d.vkDestroyImage( m_device,
2254  static_cast<VkImage>( image ),
2255  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2256  }
2257 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2258 
2259  template <typename Dispatch>
2261  const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
2263  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2264  {
2265  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2266  d.vkGetImageSubresourceLayout( m_device,
2267  static_cast<VkImage>( image ),
2268  reinterpret_cast<const VkImageSubresource *>( pSubresource ),
2269  reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
2270  }
2271 
2272 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2273  template <typename Dispatch>
2275  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2276  {
2277  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2278 
2280  d.vkGetImageSubresourceLayout( m_device,
2281  static_cast<VkImage>( image ),
2282  reinterpret_cast<const VkImageSubresource *>( &subresource ),
2283  reinterpret_cast<VkSubresourceLayout *>( &layout ) );
2284 
2285  return layout;
2286  }
2287 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2288 
2289  template <typename Dispatch>
2291  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2293  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2294  {
2295  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2296  return static_cast<Result>( d.vkCreateImageView( m_device,
2297  reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
2298  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2299  reinterpret_cast<VkImageView *>( pView ) ) );
2300  }
2301 
2302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2303  template <typename Dispatch>
2307  Dispatch const & d ) const
2308  {
2309  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2310 
2312  VkResult result =
2313  d.vkCreateImageView( m_device,
2314  reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2315  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2316  reinterpret_cast<VkImageView *>( &view ) );
2317  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
2318 
2319  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
2320  }
2321 
2322 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2323  template <typename Dispatch>
2327  Dispatch const & d ) const
2328  {
2329  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2330 
2332  VkResult result =
2333  d.vkCreateImageView( m_device,
2334  reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2335  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2336  reinterpret_cast<VkImageView *>( &view ) );
2337  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
2338 
2339  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2341  }
2342 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2343 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2344 
2345  template <typename Dispatch>
2347  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2348  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2349  {
2350  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2351  d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2352  }
2353 
2354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2355  template <typename Dispatch>
2358  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2359  {
2360  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2361 
2362  d.vkDestroyImageView( m_device,
2363  static_cast<VkImageView>( imageView ),
2364  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2365  }
2366 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2367 
2368  template <typename Dispatch>
2370  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2371  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2372  {
2373  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2374  d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2375  }
2376 
2377 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2378  template <typename Dispatch>
2381  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2382  {
2383  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2384 
2385  d.vkDestroyImageView( m_device,
2386  static_cast<VkImageView>( imageView ),
2387  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2388  }
2389 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2390 
2391  template <typename Dispatch>
2393  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2394  VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
2395  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2396  {
2397  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2398  return static_cast<Result>( d.vkCreateShaderModule( m_device,
2399  reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
2400  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2401  reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
2402  }
2403 
2404 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2405  template <typename Dispatch>
2409  Dispatch const & d ) const
2410  {
2411  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2412 
2414  VkResult result =
2415  d.vkCreateShaderModule( m_device,
2416  reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2417  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2418  reinterpret_cast<VkShaderModule *>( &shaderModule ) );
2419  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
2420 
2421  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
2422  }
2423 
2424 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle variant: same call path as createShaderModule, but the return line
 // (partially dropped by the Doxygen rendering, orig. 2443) wraps the handle in a
 // UniqueHandle so it is destroyed automatically.
2425  template <typename Dispatch>
2429  Dispatch const & d ) const
2430  {
2431  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2432 
2434  VkResult result =
2435  d.vkCreateShaderModule( m_device,
2436  reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2437  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2438  reinterpret_cast<VkShaderModule *>( &shaderModule ) );
2439  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
2440 
2441  return createResultValueType(
2442  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2444  }
2445 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2446 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2447 
 // Device::destroyShaderModule (pointer-allocator overload): thin pass-through to
 // vkDestroyShaderModule. Prototype line dropped by the Doxygen rendering (orig. 2449).
2448  template <typename Dispatch>
2450  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2451  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2452  {
2453  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2454  d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2455  }
2456 
2457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroyShaderModule: Optional<AllocationCallbacks> is converted
 // back to a raw VkAllocationCallbacks pointer before the call.
2458  template <typename Dispatch>
2461  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2462  {
2463  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2464 
2465  d.vkDestroyShaderModule( m_device,
2466  static_cast<VkShaderModule>( shaderModule ),
2467  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2468  }
2469 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2470 
 // Generic Device::destroy(ShaderModule, ...) overload — identical body to
 // destroyShaderModule; exists so handles can be destroyed uniformly via destroy().
2471  template <typename Dispatch>
2473  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2474  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2475  {
2476  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2477  d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2478  }
2479 
2480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroy(ShaderModule, Optional allocator) — same semantics as the
 // destroyShaderModule Optional overload above.
2481  template <typename Dispatch>
2484  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2485  {
2486  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2487 
2488  d.vkDestroyShaderModule( m_device,
2489  static_cast<VkShaderModule>( shaderModule ),
2490  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2491  }
2492 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2493 
 // Device::createPipelineCache (pointer overload): reinterprets the C++ wrapper
 // structs as their C equivalents and returns the raw Result from vkCreatePipelineCache.
2494  template <typename Dispatch>
2496  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2497  VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
2498  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2499  {
2500  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2501  return static_cast<Result>( d.vkCreatePipelineCache( m_device,
2502  reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
2503  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2504  reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
2505  }
2506 
2507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode createPipelineCache: result-checked, returns the PipelineCache via
 // createResultValueType. The local handle declaration (orig. 2516) was dropped by
 // the Doxygen rendering.
2508  template <typename Dispatch>
2512  Dispatch const & d ) const
2513  {
2514  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2515 
2517  VkResult result =
2518  d.vkCreatePipelineCache( m_device,
2519  reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2520  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2521  reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
2522  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
2523 
2524  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
2525  }
2526 
2527 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle variant of createPipelineCache; the trailing UniqueHandle
 // construction line (orig. 2546) was dropped by the Doxygen rendering.
2528  template <typename Dispatch>
2532  Dispatch const & d ) const
2533  {
2534  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2535 
2537  VkResult result =
2538  d.vkCreatePipelineCache( m_device,
2539  reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2540  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2541  reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
2542  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
2543 
2544  return createResultValueType(
2545  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2547  }
2548 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2549 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2550 
 // Device::destroyPipelineCache (pointer-allocator overload): direct pass-through
 // to vkDestroyPipelineCache.
2551  template <typename Dispatch>
2553  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2554  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2555  {
2556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2557  d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2558  }
2559 
2560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroyPipelineCache with Optional<AllocationCallbacks>.
2561  template <typename Dispatch>
2564  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2565  {
2566  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2567 
2568  d.vkDestroyPipelineCache( m_device,
2569  static_cast<VkPipelineCache>( pipelineCache ),
2570  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2571  }
2572 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2573 
 // Generic Device::destroy(PipelineCache, ...) overload — same body as
 // destroyPipelineCache, exposed under the uniform destroy() name.
2574  template <typename Dispatch>
2576  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2577  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2578  {
2579  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2580  d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2581  }
2582 
2583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroy(PipelineCache, Optional allocator).
2584  template <typename Dispatch>
2587  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2588  {
2589  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2590 
2591  d.vkDestroyPipelineCache( m_device,
2592  static_cast<VkPipelineCache>( pipelineCache ),
2593  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2594  }
2595 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2596 
 // Device::getPipelineCacheData (pointer overload): classic Vulkan two-call idiom —
 // pass pData == nullptr to query the size, non-null to fetch the blob.
2597  template <typename Dispatch>
2599  size_t * pDataSize,
2600  void * pData,
2601  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2602  {
2603  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2604  return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
2605  }
2606 
2607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode getPipelineCacheData: loops the size-query / fetch pair while the
 // driver reports VK_INCOMPLETE (the cache can grow between the two calls), then
 // shrinks the vector to the size actually written before returning it.
2608  template <typename Uint8_tAllocator, typename Dispatch>
2610  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
2611  {
2612  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2613 
2614  std::vector<uint8_t, Uint8_tAllocator> data;
2615  size_t dataSize;
2616  VkResult result;
2617  do
2618  {
 // First call: dataSize receives the required byte count.
2619  result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
2620  if ( ( result == VK_SUCCESS ) && dataSize )
2621  {
2622  data.resize( dataSize );
 // Second call: fill the buffer; may return VK_INCOMPLETE if the cache grew.
2623  result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
2624  }
2625  } while ( result == VK_INCOMPLETE );
2626  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
2627  VULKAN_HPP_ASSERT( dataSize <= data.size() );
2628  if ( dataSize < data.size() )
2629  {
2630  data.resize( dataSize );
2631  }
2632  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
2633  }
2634 
 // Same two-call retry loop as the overload above, but the caller supplies the
 // vector's allocator. The enable_if constraint line (orig. 2638-2639) was dropped
 // by the Doxygen rendering.
2635  template <typename Uint8_tAllocator,
2636  typename Dispatch,
2637  typename B1,
2640  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
2641  {
2642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2643 
2644  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
2645  size_t dataSize;
2646  VkResult result;
2647  do
2648  {
2649  result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
2650  if ( ( result == VK_SUCCESS ) && dataSize )
2651  {
2652  data.resize( dataSize );
2653  result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
2654  }
2655  } while ( result == VK_INCOMPLETE );
2656  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
2657  VULKAN_HPP_ASSERT( dataSize <= data.size() );
2658  if ( dataSize < data.size() )
2659  {
2660  data.resize( dataSize );
2661  }
2662  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
2663  }
2664 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2665 
 // Device::mergePipelineCaches (pointer overload): merges srcCacheCount caches into
 // dstCache; returns the raw Result.
2666  template <typename Dispatch>
2668  uint32_t srcCacheCount,
2669  const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
2670  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2671  {
2672  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2673  return static_cast<Result>(
2674  d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
2675  }
2676 
2677 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode mergePipelineCaches: srcCaches is an ArrayProxy (count/data taken
 // from it); result-checked, returns void-typed ResultValue.
2678  template <typename Dispatch>
2682  Dispatch const & d ) const
2683  {
2684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2685 
2686  VkResult result = d.vkMergePipelineCaches(
2687  m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
2688  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
2689 
2690  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
2691  }
2692 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2693 
 // Device::createGraphicsPipelines (pointer overload): batch-creates pipelines from
 // an optional cache; returns the raw Result (may be VK_PIPELINE_COMPILE_REQUIRED-
 // style partial results per the C API — caller inspects it).
2694  template <typename Dispatch>
2696  uint32_t createInfoCount,
2698  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2699  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
2700  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2701  {
2702  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2703  return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
2704  static_cast<VkPipelineCache>( pipelineCache ),
2705  createInfoCount,
2706  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
2707  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2708  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
2709  }
2710 
2711 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode createGraphicsPipelines: pre-sizes a vector to createInfos.size(),
 // result-checks allowing the pipeline-compile-required success code (the allowed-
 // result list and return statement, orig. 2731/2733, were dropped by the Doxygen
 // rendering), and returns ResultValue<vector<Pipeline>>.
2712  template <typename PipelineAllocator, typename Dispatch>
2717  Dispatch const & d ) const
2718  {
2719  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2720 
2721  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
2722  VkResult result = d.vkCreateGraphicsPipelines(
2723  m_device,
2724  static_cast<VkPipelineCache>( pipelineCache ),
2725  createInfos.size(),
2726  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
2727  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2728  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2729  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2730  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
2732 
2734  }
2735 
 // Same as the vector overload above but with a caller-supplied PipelineAllocator
 // for the result vector (enable_if constraint lines dropped by the rendering,
 // orig. 2739-2743).
2736  template <typename PipelineAllocator,
2737  typename Dispatch,
2738  typename B0,
2744  PipelineAllocator & pipelineAllocator,
2745  Dispatch const & d ) const
2746  {
2747  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2748 
2749  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
2750  VkResult result = d.vkCreateGraphicsPipelines(
2751  m_device,
2752  static_cast<VkPipelineCache>( pipelineCache ),
2753  createInfos.size(),
2754  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
2755  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2756  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2757  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2758  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
2760 
2762  }
2763 
 // Single-pipeline convenience: calls vkCreateGraphicsPipelines with count == 1.
 // Local Pipeline declaration and return line (orig. 2773/2783/2785) were dropped
 // by the Doxygen rendering.
2764  template <typename Dispatch>
2769  Dispatch const & d ) const
2770  {
2771  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2772 
2774  VkResult result = d.vkCreateGraphicsPipelines(
2775  m_device,
2776  static_cast<VkPipelineCache>( pipelineCache ),
2777  1,
2778  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
2779  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2780  reinterpret_cast<VkPipeline *>( &pipeline ) );
2781  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2782  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
2784 
2786  }
2787 
2788 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle batch create: builds plain Pipelines first, then wraps each in a
 // UniqueHandle sharing one ObjectDestroy deleter (so each is destroyed with the
 // same allocator/dispatcher on scope exit).
2789  template <typename Dispatch, typename PipelineAllocator>
2794  Dispatch const & d ) const
2795  {
2796  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2797 
2798  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
2799  VkResult result = d.vkCreateGraphicsPipelines(
2800  m_device,
2801  static_cast<VkPipelineCache>( pipelineCache ),
2802  createInfos.size(),
2803  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
2804  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2805  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2806  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2807  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
2809  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
2810  uniquePipelines.reserve( createInfos.size() );
2811  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
2812  for ( auto const & pipeline : pipelines )
2813  {
2814  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
2815  }
2817  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
2818  }
2819 
 // Unique-handle batch create with caller-supplied allocator for the UniqueHandle
 // vector; otherwise identical to the overload above.
2820  template <typename Dispatch,
2821  typename PipelineAllocator,
2822  typename B0,
2823  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
2828  PipelineAllocator & pipelineAllocator,
2829  Dispatch const & d ) const
2830  {
2831  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2832 
2833  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
2834  VkResult result = d.vkCreateGraphicsPipelines(
2835  m_device,
2836  static_cast<VkPipelineCache>( pipelineCache ),
2837  createInfos.size(),
2838  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
2839  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2840  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2841  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2842  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
2844  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
2845  uniquePipelines.reserve( createInfos.size() );
2846  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
2847  for ( auto const & pipeline : pipelines )
2848  {
2849  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
2850  }
2852  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
2853  }
2854 
 // Single-pipeline smart-handle variant (count == 1); the UniqueHandle wrap in the
 // return (orig. 2878) was dropped by the Doxygen rendering.
2855  template <typename Dispatch>
2860  Dispatch const & d ) const
2861  {
2862  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2863 
2865  VkResult result = d.vkCreateGraphicsPipelines(
2866  m_device,
2867  static_cast<VkPipelineCache>( pipelineCache ),
2868  1,
2869  reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
2870  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2871  reinterpret_cast<VkPipeline *>( &pipeline ) );
2872  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2873  VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
2875 
2877  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2879  }
2880 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
2881 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2882 
 // Device::createComputePipelines (pointer overload): compute-pipeline analogue of
 // createGraphicsPipelines; returns the raw Result.
2883  template <typename Dispatch>
2885  uint32_t createInfoCount,
2887  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
2888  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
2889  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2890  {
2891  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2892  return static_cast<Result>( d.vkCreateComputePipelines( m_device,
2893  static_cast<VkPipelineCache>( pipelineCache ),
2894  createInfoCount,
2895  reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
2896  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2897  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
2898  }
2899 
2900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode createComputePipelines: mirrors the graphics-pipeline overload;
 // allowed-result list and return (orig. 2920/2922) dropped by the rendering.
2901  template <typename PipelineAllocator, typename Dispatch>
2906  Dispatch const & d ) const
2907  {
2908  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2909 
2910  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
2911  VkResult result = d.vkCreateComputePipelines(
2912  m_device,
2913  static_cast<VkPipelineCache>( pipelineCache ),
2914  createInfos.size(),
2915  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
2916  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2917  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2918  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2919  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
2921 
2923  }
2924 
 // createComputePipelines with caller-supplied PipelineAllocator for the result
 // vector (enable_if constraint lines dropped by the rendering, orig. 2928-2932).
2925  template <typename PipelineAllocator,
2926  typename Dispatch,
2927  typename B0,
2933  PipelineAllocator & pipelineAllocator,
2934  Dispatch const & d ) const
2935  {
2936  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2937 
2938  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
2939  VkResult result = d.vkCreateComputePipelines(
2940  m_device,
2941  static_cast<VkPipelineCache>( pipelineCache ),
2942  createInfos.size(),
2943  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
2944  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2945  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2946  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2947  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
2949 
2951  }
2952 
 // Single compute pipeline convenience: vkCreateComputePipelines with count == 1.
 // Local declaration and return lines (orig. 2962/2972/2974) dropped by the rendering.
2953  template <typename Dispatch>
2958  Dispatch const & d ) const
2959  {
2960  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2961 
2963  VkResult result = d.vkCreateComputePipelines(
2964  m_device,
2965  static_cast<VkPipelineCache>( pipelineCache ),
2966  1,
2967  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
2968  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2969  reinterpret_cast<VkPipeline *>( &pipeline ) );
2970  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2971  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
2973 
2975  }
2976 
2977 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle batch create for compute pipelines: plain handles first, then each
 // wrapped in a UniqueHandle sharing one ObjectDestroy deleter.
2978  template <typename Dispatch, typename PipelineAllocator>
2983  Dispatch const & d ) const
2984  {
2985  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2986 
2987  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
2988  VkResult result = d.vkCreateComputePipelines(
2989  m_device,
2990  static_cast<VkPipelineCache>( pipelineCache ),
2991  createInfos.size(),
2992  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
2993  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2994  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2995  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2996  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
2998  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
2999  uniquePipelines.reserve( createInfos.size() );
3000  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3001  for ( auto const & pipeline : pipelines )
3002  {
3003  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3004  }
3006  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3007  }
3008 
 // Unique-handle compute-pipeline batch create with caller-supplied allocator for
 // the UniqueHandle vector.
3009  template <typename Dispatch,
3010  typename PipelineAllocator,
3011  typename B0,
3012  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
3017  PipelineAllocator & pipelineAllocator,
3018  Dispatch const & d ) const
3019  {
3020  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3021 
3022  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
3023  VkResult result = d.vkCreateComputePipelines(
3024  m_device,
3025  static_cast<VkPipelineCache>( pipelineCache ),
3026  createInfos.size(),
3027  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
3028  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3029  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
3030  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3031  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
3033  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
3034  uniquePipelines.reserve( createInfos.size() );
3035  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
3036  for ( auto const & pipeline : pipelines )
3037  {
3038  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
3039  }
3041  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
3042  }
3043 
 // Single compute pipeline, smart-handle variant (count == 1); UniqueHandle wrap in
 // the return (orig. 3067) dropped by the Doxygen rendering.
3044  template <typename Dispatch>
3049  Dispatch const & d ) const
3050  {
3051  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3052 
3054  VkResult result = d.vkCreateComputePipelines(
3055  m_device,
3056  static_cast<VkPipelineCache>( pipelineCache ),
3057  1,
3058  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
3059  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3060  reinterpret_cast<VkPipeline *>( &pipeline ) );
3061  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3062  VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
3064 
3066  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3068  }
3069 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3070 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3071 
 // Device::destroyPipeline (pointer-allocator overload): pass-through to
 // vkDestroyPipeline.
3072  template <typename Dispatch>
3074  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3075  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3076  {
3077  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3078  d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3079  }
3080 
3081 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroyPipeline with Optional<AllocationCallbacks>.
3082  template <typename Dispatch>
3085  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3086  {
3087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3088 
3089  d.vkDestroyPipeline( m_device,
3090  static_cast<VkPipeline>( pipeline ),
3091  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3092  }
3093 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3094 
 // Generic Device::destroy(Pipeline, ...) overload — same body as destroyPipeline.
3095  template <typename Dispatch>
3097  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3098  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3099  {
3100  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3101  d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3102  }
3103 
3104 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroy(Pipeline, Optional allocator).
3105  template <typename Dispatch>
3108  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3109  {
3110  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3111 
3112  d.vkDestroyPipeline( m_device,
3113  static_cast<VkPipeline>( pipeline ),
3114  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3115  }
3116 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3117 
 // Device::createPipelineLayout (pointer overload): returns the raw Result from
 // vkCreatePipelineLayout.
3118  template <typename Dispatch>
3120  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3121  VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
3122  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3123  {
3124  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3125  return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
3126  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
3127  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3128  reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
3129  }
3130 
3131 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode createPipelineLayout: result-checked, returns the new
 // PipelineLayout via createResultValueType.
3132  template <typename Dispatch>
3136  Dispatch const & d ) const
3137  {
3138  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3139 
3140  VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
3141  VkResult result =
3142  d.vkCreatePipelineLayout( m_device,
3143  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3144  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3145  reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
3146  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
3147 
3148  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
3149  }
3150 
3151 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle variant of createPipelineLayout; the UniqueHandle wrap in the
 // return (orig. 3170) was dropped by the Doxygen rendering.
3152  template <typename Dispatch>
3156  Dispatch const & d ) const
3157  {
3158  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3159 
3160  VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
3161  VkResult result =
3162  d.vkCreatePipelineLayout( m_device,
3163  reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3164  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3165  reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
3166  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
3167 
3168  return createResultValueType(
3169  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3171  }
3172 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3173 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3174 
 // Device::destroyPipelineLayout (pointer-allocator overload).
3175  template <typename Dispatch>
3177  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3178  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3179  {
3180  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3181  d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3182  }
3183 
3184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroyPipelineLayout with Optional<AllocationCallbacks>.
3185  template <typename Dispatch>
3188  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3189  {
3190  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3191 
3192  d.vkDestroyPipelineLayout( m_device,
3193  static_cast<VkPipelineLayout>( pipelineLayout ),
3194  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3195  }
3196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3197 
 // Generic Device::destroy(PipelineLayout, ...) overload — same body as
 // destroyPipelineLayout.
3198  template <typename Dispatch>
3200  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3201  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3202  {
3203  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3204  d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3205  }
3206 
3207 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode destroy(PipelineLayout, Optional allocator).
3208  template <typename Dispatch>
3211  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3212  {
3213  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3214 
3215  d.vkDestroyPipelineLayout( m_device,
3216  static_cast<VkPipelineLayout>( pipelineLayout ),
3217  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3218  }
3219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3220 
 // Device::createSampler (pointer overload): returns the raw Result from
 // vkCreateSampler.
3221  template <typename Dispatch>
3223  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3224  VULKAN_HPP_NAMESPACE::Sampler * pSampler,
3225  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3226  {
3227  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3228  return static_cast<Result>( d.vkCreateSampler( m_device,
3229  reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
3230  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3231  reinterpret_cast<VkSampler *>( pSampler ) ) );
3232  }
3233 
3234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced-mode createSampler: result-checked, returns the new Sampler via
 // createResultValueType (local handle declaration, orig. 3241, dropped by the
 // Doxygen rendering).
3235  template <typename Dispatch>
3237  const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
3238  {
3239  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3240 
3242  VkResult result =
3243  d.vkCreateSampler( m_device,
3244  reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3245  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3246  reinterpret_cast<VkSampler *>( &sampler ) );
3247  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
3248 
3249  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
3250  }
3251 
3252 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Smart-handle variant of createSampler; the UniqueHandle wrap in the return
 // (orig. 3268) was dropped by the Doxygen rendering.
3253  template <typename Dispatch>
3255  const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
3256  {
3257  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3258 
3260  VkResult result =
3261  d.vkCreateSampler( m_device,
3262  reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3263  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3264  reinterpret_cast<VkSampler *>( &sampler ) );
3265  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
3266 
3267  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3269  }
3270 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3271 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3272 
3273  template <typename Dispatch>
3275  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3276  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3277  {
3278  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3279  d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3280  }
3281 
3282 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3283  template <typename Dispatch>
3286  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3287  {
3288  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3289 
3290  d.vkDestroySampler( m_device,
3291  static_cast<VkSampler>( sampler ),
3292  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3293  }
3294 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3295 
3296  template <typename Dispatch>
3298  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3299  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3300  {
3301  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3302  d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3303  }
3304 
3305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3306  template <typename Dispatch>
3309  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3310  {
3311  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3312 
3313  d.vkDestroySampler( m_device,
3314  static_cast<VkSampler>( sampler ),
3315  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3316  }
3317 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3318 
// --- VkDescriptorSetLayout create/destroy wrappers (generated) -------------
// Same generated pattern as the Sampler family above: pointer overload,
// enhanced overload, Unique variant, and two destroy overload pairs.
// NOTE(review): declaration lines and the local `setLayout` declaration
// (listing 3320, 3334-3336, 3341, 3354-3356, 3361, etc.) were dropped by the
// Doxygen extraction; `&setLayout` below refers to that elided local.
3319  template <typename Dispatch>
3321  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3323  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3324  {
3325  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3326  return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
3327  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
3328  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3329  reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
3330  }
3331 
3332 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3333  template <typename Dispatch>
3337  Dispatch const & d ) const
3338  {
3339  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3340 
3342  VkResult result = d.vkCreateDescriptorSetLayout(
3343  m_device,
3344  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3345  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3346  reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
3347  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
3348 
3349  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
3350  }
3351 
3352 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3353  template <typename Dispatch>
3357  Dispatch const & d ) const
3358  {
3359  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3360 
3362  VkResult result = d.vkCreateDescriptorSetLayout(
3363  m_device,
3364  reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3365  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3366  reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
3367  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
3368 
// Trailing UniqueHandle argument (listing 3371) elided by extraction.
3369  return createResultValueType(
3370  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3372  }
3373 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3374 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3375 
3376  template <typename Dispatch>
3378  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3379  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3380  {
3381  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3382  d.vkDestroyDescriptorSetLayout(
3383  m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3384  }
3385 
3386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3387  template <typename Dispatch>
3390  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3391  {
3392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3393 
3394  d.vkDestroyDescriptorSetLayout(
3395  m_device,
3396  static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3397  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3398  }
3399 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3400 
// Second destroy pair — presumably the generic destroy( DescriptorSetLayout )
// overloads; bodies identical to destroyDescriptorSetLayout above.
3401  template <typename Dispatch>
3403  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3404  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3405  {
3406  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3407  d.vkDestroyDescriptorSetLayout(
3408  m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3409  }
3410 
3411 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3412  template <typename Dispatch>
3415  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3416  {
3417  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3418 
3419  d.vkDestroyDescriptorSetLayout(
3420  m_device,
3421  static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3422  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3423  }
3424 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3425 
// --- VkDescriptorPool create/destroy/reset wrappers (generated) ------------
// Pointer, enhanced, and Unique create variants; two destroy pairs; and
// resetDescriptorPool in both build modes. Declaration lines were elided by
// the Doxygen extraction, but here the locals (e.g. listing 3448/3468
// `DescriptorPool descriptorPool;`) ARE visible.
3426  template <typename Dispatch>
3428  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3429  VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
3430  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3431  {
3432  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3433  return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
3434  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
3435  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3436  reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
3437  }
3438 
3439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3440  template <typename Dispatch>
3444  Dispatch const & d ) const
3445  {
3446  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3447 
3448  VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
3449  VkResult result =
3450  d.vkCreateDescriptorPool( m_device,
3451  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3452  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3453  reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
3454  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
3455 
3456  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
3457  }
3458 
3459 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3460  template <typename Dispatch>
3464  Dispatch const & d ) const
3465  {
3466  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3467 
3468  VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
3469  VkResult result =
3470  d.vkCreateDescriptorPool( m_device,
3471  reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3472  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3473  reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
3474  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
3475 
// Trailing UniqueHandle argument (listing 3478) elided by extraction.
3476  return createResultValueType(
3477  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3479  }
3480 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3481 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3482 
3483  template <typename Dispatch>
3485  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3486  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3487  {
3488  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3489  d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3490  }
3491 
3492 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3493  template <typename Dispatch>
3496  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3497  {
3498  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3499 
3500  d.vkDestroyDescriptorPool( m_device,
3501  static_cast<VkDescriptorPool>( descriptorPool ),
3502  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3503  }
3504 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3505 
// Second destroy pair — presumably generic destroy( DescriptorPool ).
3506  template <typename Dispatch>
3508  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3509  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3510  {
3511  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3512  d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3513  }
3514 
3515 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3516  template <typename Dispatch>
3519  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3520  {
3521  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3522 
3523  d.vkDestroyDescriptorPool( m_device,
3524  static_cast<VkDescriptorPool>( descriptorPool ),
3525  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3526  }
3527 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3528 
// resetDescriptorPool: disabled-enhanced build returns the raw Result;
// enhanced build discards it (vkResetDescriptorPool's only failure modes are
// handled elsewhere by the generator — note the enhanced body ignores the
// return value, unlike resetCommandPool below).
3529 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
3530  template <typename Dispatch>
3533  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3534  {
3535  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3536  return static_cast<Result>(
3537  d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
3538  }
3539 #else
3540  template <typename Dispatch>
3543  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3544  {
3545  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3546 
3547  d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
3548  }
3549 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3550 
// --- VkDescriptorSet allocate/free/update wrappers (generated) -------------
// allocateDescriptorSets: pointer overload; vector overload (size taken from
// allocateInfo.descriptorSetCount); vector overload with caller-supplied
// std::vector allocator; and the two Unique variants that wrap each handle in
// a UniqueHandle with a PoolFree deleter so freed sets return to the pool.
3551  template <typename Dispatch>
3553  VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
3554  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3555  {
3556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3557  return static_cast<Result>( d.vkAllocateDescriptorSets(
3558  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
3559  }
3560 
3561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3562  template <typename DescriptorSetAllocator, typename Dispatch>
3565  {
3566  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3567 
// Output vector pre-sized to the requested count so the C call can fill it.
3568  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
3569  VkResult result = d.vkAllocateDescriptorSets(
3570  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3571  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
3572 
3573  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
3574  }
3575 
3576  template <typename DescriptorSetAllocator,
3577  typename Dispatch,
3578  typename B0,
// NOTE(review): the enable_if constraint line (listing 3579-3581) and the
// declaration were elided by the Doxygen extraction.
3582  DescriptorSetAllocator & descriptorSetAllocator,
3583  Dispatch const & d ) const
3584  {
3585  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3586 
3587  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
3588  VkResult result = d.vkAllocateDescriptorSets(
3589  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3590  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
3591 
3592  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
3593  }
3594 
3595 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3596  template <typename Dispatch, typename DescriptorSetAllocator>
3600  {
3601  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3602 
3603  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
3604  VkResult result = d.vkAllocateDescriptorSets(
3605  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3606  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
3607  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
3608  uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
// One shared PoolFree deleter: each UniqueHandle frees back into the pool
// named by allocateInfo.descriptorPool on destruction.
3609  PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
3610  for ( auto const & descriptorSet : descriptorSets )
3611  {
3612  uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
3613  }
3614  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
3615  }
3616 
3617  template <typename Dispatch,
3618  typename DescriptorSetAllocator,
3619  typename B0,
3620  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
3624  DescriptorSetAllocator & descriptorSetAllocator,
3625  Dispatch const & d ) const
3626  {
3627  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3628 
3629  std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
3630  VkResult result = d.vkAllocateDescriptorSets(
3631  m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3632  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
3633  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
3634  uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
3635  PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
3636  for ( auto const & descriptorSet : descriptorSets )
3637  {
3638  uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
3639  }
3640  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
3641  }
3642 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3643 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3644 
// freeDescriptorSets (pointer + ArrayProxy overloads) and the generic
// free(...) pair with identical bodies; note the enhanced overloads discard
// vkFreeDescriptorSets' return value.
3645  template <typename Dispatch>
3647  uint32_t descriptorSetCount,
3648  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
3649  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3650  {
3651  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3652  return static_cast<Result>( d.vkFreeDescriptorSets(
3653  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
3654  }
3655 
3656 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3657  template <typename Dispatch>
3660  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3661  {
3662  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3663 
3664  d.vkFreeDescriptorSets(
3665  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
3666  }
3667 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3668 
3669  template <typename Dispatch>
3671  uint32_t descriptorSetCount,
3672  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
3673  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3674  {
3675  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3676  return static_cast<Result>( d.vkFreeDescriptorSets(
3677  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
3678  }
3679 
3680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3681  template <typename Dispatch>
3684  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3685  {
3686  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3687 
3688  d.vkFreeDescriptorSets(
3689  m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
3690  }
3691 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3692 
// updateDescriptorSets: pure pass-through of write/copy arrays; no result.
3693  template <typename Dispatch>
3694  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
3695  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
3696  uint32_t descriptorCopyCount,
3697  const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
3698  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3699  {
3700  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3701  d.vkUpdateDescriptorSets( m_device,
3702  descriptorWriteCount,
3703  reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
3704  descriptorCopyCount,
3705  reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
3706  }
3707 
3708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3709  template <typename Dispatch>
3710  VULKAN_HPP_INLINE void
3713  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3714  {
3715  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3716 
3717  d.vkUpdateDescriptorSets( m_device,
3718  descriptorWrites.size(),
3719  reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
3720  descriptorCopies.size(),
3721  reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
3722  }
3723 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3724 
// --- VkFramebuffer create/destroy wrappers (generated) ---------------------
// Same pattern as the families above; declaration lines and the local
// `framebuffer` declaration (listing 3747/3767) were elided by extraction.
3725  template <typename Dispatch>
3727  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3728  VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
3729  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3730  {
3731  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3732  return static_cast<Result>( d.vkCreateFramebuffer( m_device,
3733  reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
3734  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3735  reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
3736  }
3737 
3738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3739  template <typename Dispatch>
3743  Dispatch const & d ) const
3744  {
3745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3746 
3748  VkResult result =
3749  d.vkCreateFramebuffer( m_device,
3750  reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
3751  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3752  reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
3753  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
3754 
3755  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
3756  }
3757 
3758 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3759  template <typename Dispatch>
3763  Dispatch const & d ) const
3764  {
3765  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3766 
3768  VkResult result =
3769  d.vkCreateFramebuffer( m_device,
3770  reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
3771  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3772  reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
3773  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
3774 
// Trailing UniqueHandle argument (listing 3777) elided by extraction.
3775  return createResultValueType(
3776  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3778  }
3779 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3780 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3781 
3782  template <typename Dispatch>
3784  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3785  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3786  {
3787  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3788  d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3789  }
3790 
3791 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3792  template <typename Dispatch>
3795  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3796  {
3797  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3798 
3799  d.vkDestroyFramebuffer( m_device,
3800  static_cast<VkFramebuffer>( framebuffer ),
3801  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3802  }
3803 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3804 
// Second destroy pair — presumably generic destroy( Framebuffer ).
3805  template <typename Dispatch>
3807  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3808  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3809  {
3810  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3811  d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3812  }
3813 
3814 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3815  template <typename Dispatch>
3818  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3819  {
3820  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3821 
3822  d.vkDestroyFramebuffer( m_device,
3823  static_cast<VkFramebuffer>( framebuffer ),
3824  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3825  }
3826 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3827 
// --- VkRenderPass create/destroy + render-area-granularity query -----------
// Same generated pattern; declaration lines and the `renderPass` local
// (listing 3850/3870) were elided by the Doxygen extraction.
3828  template <typename Dispatch>
3830  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3831  VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
3832  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3833  {
3834  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3835  return static_cast<Result>( d.vkCreateRenderPass( m_device,
3836  reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
3837  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3838  reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
3839  }
3840 
3841 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3842  template <typename Dispatch>
3846  Dispatch const & d ) const
3847  {
3848  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3849 
3851  VkResult result =
3852  d.vkCreateRenderPass( m_device,
3853  reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
3854  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3855  reinterpret_cast<VkRenderPass *>( &renderPass ) );
3856  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
3857 
3858  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
3859  }
3860 
3861 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3862  template <typename Dispatch>
3866  Dispatch const & d ) const
3867  {
3868  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3869 
3871  VkResult result =
3872  d.vkCreateRenderPass( m_device,
3873  reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
3874  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3875  reinterpret_cast<VkRenderPass *>( &renderPass ) );
3876  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
3877 
// Trailing UniqueHandle argument (listing 3880) elided by extraction.
3878  return createResultValueType(
3879  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3881  }
3882 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
3883 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3884 
3885  template <typename Dispatch>
3887  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3888  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3889  {
3890  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3891  d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3892  }
3893 
3894 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3895  template <typename Dispatch>
3898  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3899  {
3900  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3901 
3902  d.vkDestroyRenderPass( m_device,
3903  static_cast<VkRenderPass>( renderPass ),
3904  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3905  }
3906 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3907 
// Second destroy pair — presumably generic destroy( RenderPass ).
3908  template <typename Dispatch>
3910  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3911  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3912  {
3913  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3914  d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3915  }
3916 
3917 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3918  template <typename Dispatch>
3921  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3922  {
3923  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3924 
3925  d.vkDestroyRenderPass( m_device,
3926  static_cast<VkRenderPass>( renderPass ),
3927  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3928  }
3929 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3930 
// getRenderAreaGranularity: out-parameter form and enhanced form that
// returns the Extent2D by value (void C call, cannot fail).
3931  template <typename Dispatch>
3933  VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
3934  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3935  {
3936  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3937  d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
3938  }
3939 
3940 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3941  template <typename Dispatch>
3943  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3944  {
3945  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3946 
3947  VULKAN_HPP_NAMESPACE::Extent2D granularity;
3948  d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
3949 
3950  return granularity;
3951  }
3952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3953 
// --- VkCommandPool create/destroy/reset wrappers (generated) ---------------
// Same generated pattern; declaration lines and the `commandPool` local
// (listing 3976/3996) were elided by the Doxygen extraction.
3954  template <typename Dispatch>
3956  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
3957  VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
3958  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3959  {
3960  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3961  return static_cast<Result>( d.vkCreateCommandPool( m_device,
3962  reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
3963  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3964  reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
3965  }
3966 
3967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3968  template <typename Dispatch>
3972  Dispatch const & d ) const
3973  {
3974  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3975 
3977  VkResult result =
3978  d.vkCreateCommandPool( m_device,
3979  reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
3980  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3981  reinterpret_cast<VkCommandPool *>( &commandPool ) );
3982  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
3983 
3984  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
3985  }
3986 
3987 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3988  template <typename Dispatch>
3992  Dispatch const & d ) const
3993  {
3994  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3995 
3997  VkResult result =
3998  d.vkCreateCommandPool( m_device,
3999  reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
4000  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4001  reinterpret_cast<VkCommandPool *>( &commandPool ) );
4002  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
4003 
// Trailing UniqueHandle argument (listing 4006) elided by extraction.
4004  return createResultValueType(
4005  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
4007  }
4008 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4009 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4010 
4011  template <typename Dispatch>
4013  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4014  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4015  {
4016  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4017  d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4018  }
4019 
4020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4021  template <typename Dispatch>
4024  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4025  {
4026  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4027 
4028  d.vkDestroyCommandPool( m_device,
4029  static_cast<VkCommandPool>( commandPool ),
4030  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4031  }
4032 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4033 
// Second destroy pair — presumably generic destroy( CommandPool ).
4034  template <typename Dispatch>
4036  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
4037  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4038  {
4039  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4040  d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4041  }
4042 
4043 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4044  template <typename Dispatch>
4047  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4048  {
4049  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4050 
4051  d.vkDestroyCommandPool( m_device,
4052  static_cast<VkCommandPool>( commandPool ),
4053  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4054  }
4055 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4056 
// resetCommandPool: disabled-enhanced build returns raw Result; enhanced
// build (unlike resetDescriptorPool above) DOES check the result via
// resultCheck and returns through createResultValueType.
4057 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4058  template <typename Dispatch>
4061  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4062  {
4063  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4064  return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
4065  }
4066 #else
4067  template <typename Dispatch>
4070  {
4071  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4072 
4073  VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
4074  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
4075 
4076  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4077  }
4078 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4079 
4080  template <typename Dispatch>
4082  VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4083  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4084  {
4085  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4086  return static_cast<Result>( d.vkAllocateCommandBuffers(
4087  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
4088  }
4089 
4090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4091  template <typename CommandBufferAllocator, typename Dispatch>
4094  {
4095  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4096 
4097  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
4098  VkResult result = d.vkAllocateCommandBuffers(
4099  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4100  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
4101 
4102  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
4103  }
4104 
4105  template <typename CommandBufferAllocator,
4106  typename Dispatch,
4107  typename B0,
4111  CommandBufferAllocator & commandBufferAllocator,
4112  Dispatch const & d ) const
4113  {
4114  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4115 
4116  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
4117  VkResult result = d.vkAllocateCommandBuffers(
4118  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4119  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
4120 
4121  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
4122  }
4123 
4124 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4125  template <typename Dispatch, typename CommandBufferAllocator>
4129  {
4130  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4131 
4132  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
4133  VkResult result = d.vkAllocateCommandBuffers(
4134  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4135  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
4136  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
4137  uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
4138  PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
4139  for ( auto const & commandBuffer : commandBuffers )
4140  {
4141  uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
4142  }
4143  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
4144  }
4145 
4146  template <typename Dispatch,
4147  typename CommandBufferAllocator,
4148  typename B0,
4149  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
4153  CommandBufferAllocator & commandBufferAllocator,
4154  Dispatch const & d ) const
4155  {
4156  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4157 
4158  std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
4159  VkResult result = d.vkAllocateCommandBuffers(
4160  m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4161  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
4162  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
4163  uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
4164  PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
4165  for ( auto const & commandBuffer : commandBuffers )
4166  {
4167  uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
4168  }
4169  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
4170  }
4171 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
4172 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4173 
4174  template <typename Dispatch>
4176  uint32_t commandBufferCount,
4177  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4178  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4179  {
4180  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4181  d.vkFreeCommandBuffers(
4182  m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4183  }
4184 
4185 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4186  template <typename Dispatch>
4189  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4190  {
4191  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4192 
4193  d.vkFreeCommandBuffers(
4194  m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
4195  }
4196 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4197 
4198  template <typename Dispatch>
4200  uint32_t commandBufferCount,
4201  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
4202  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4203  {
4204  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4205  d.vkFreeCommandBuffers(
4206  m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4207  }
4208 
4209 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4210  template <typename Dispatch>
4213  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4214  {
4215  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4216 
4217  d.vkFreeCommandBuffers(
4218  m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
4219  }
4220 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4221 
4222  template <typename Dispatch>
4224  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4225  {
4226  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4227  return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
4228  }
4229 
4230 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4231  template <typename Dispatch>
4233  CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
4234  {
4235  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4236 
4237  VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
4238  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
4239 
4240  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4241  }
4242 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4243 
4244 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4245  template <typename Dispatch>
4247  {
4248  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4249  return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
4250  }
4251 #else
4252  template <typename Dispatch>
4254  {
4255  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4256 
4257  VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
4258  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
4259 
4260  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4261  }
4262 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4263 
4264 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4265  template <typename Dispatch>
4267  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4268  {
4269  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4270  return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
4271  }
4272 #else
4273  template <typename Dispatch>
4275  {
4276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4277 
4278  VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
4279  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
4280 
4281  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4282  }
4283 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4284 
4285  template <typename Dispatch>
4288  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4289  {
4290  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4291  d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
4292  }
4293 
4294  template <typename Dispatch>
4295  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
4296  uint32_t viewportCount,
4297  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
4298  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4299  {
4300  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4301  d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
4302  }
4303 
4304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4305  template <typename Dispatch>
4306  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
4308  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4309  {
4310  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4311 
4312  d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
4313  }
4314 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4315 
4316  template <typename Dispatch>
4317  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
4318  uint32_t scissorCount,
4319  const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
4320  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4321  {
4322  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4323  d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
4324  }
4325 
4326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4327  template <typename Dispatch>
4328  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
4330  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4331  {
4332  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4333 
4334  d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
4335  }
4336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4337 
4338  template <typename Dispatch>
4339  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4340  {
4341  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4342  d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
4343  }
4344 
4345  template <typename Dispatch>
4346  VULKAN_HPP_INLINE void
4347  CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4348  {
4349  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4350  d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
4351  }
4352 
4353  template <typename Dispatch>
4354  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4355  {
4356  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4357  d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
4358  }
4359 
4360  template <typename Dispatch>
4361  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4362  {
4363  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4364  d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
4365  }
4366 
4367  template <typename Dispatch>
4368  VULKAN_HPP_INLINE void
4370  {
4371  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4372  d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
4373  }
4374 
4375  template <typename Dispatch>
4376  VULKAN_HPP_INLINE void
4378  {
4379  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4380  d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
4381  }
4382 
4383  template <typename Dispatch>
4384  VULKAN_HPP_INLINE void
4386  {
4387  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4388  d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
4389  }
4390 
4391  template <typename Dispatch>
4394  uint32_t firstSet,
4395  uint32_t descriptorSetCount,
4396  const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
4397  uint32_t dynamicOffsetCount,
4398  const uint32_t * pDynamicOffsets,
4399  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4400  {
4401  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4402  d.vkCmdBindDescriptorSets( m_commandBuffer,
4403  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4404  static_cast<VkPipelineLayout>( layout ),
4405  firstSet,
4406  descriptorSetCount,
4407  reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
4408  dynamicOffsetCount,
4409  pDynamicOffsets );
4410  }
4411 
4412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4413  template <typename Dispatch>
4416  uint32_t firstSet,
4418  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
4419  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4420  {
4421  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4422 
4423  d.vkCmdBindDescriptorSets( m_commandBuffer,
4424  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4425  static_cast<VkPipelineLayout>( layout ),
4426  firstSet,
4427  descriptorSets.size(),
4428  reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
4429  dynamicOffsets.size(),
4430  dynamicOffsets.data() );
4431  }
4432 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4433 
4434  template <typename Dispatch>
4438  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4439  {
4440  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4441  d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
4442  }
4443 
4444  template <typename Dispatch>
4446  uint32_t bindingCount,
4447  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
4448  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
4449  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4450  {
4451  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4452  d.vkCmdBindVertexBuffers(
4453  m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
4454  }
4455 
4456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4457  template <typename Dispatch>
4461  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
4462  {
4463  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4464 # ifdef VULKAN_HPP_NO_EXCEPTIONS
4465  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
4466 # else
4467  if ( buffers.size() != offsets.size() )
4468  {
4469  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
4470  }
4471 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
4472 
4473  d.vkCmdBindVertexBuffers( m_commandBuffer,
4474  firstBinding,
4475  buffers.size(),
4476  reinterpret_cast<const VkBuffer *>( buffers.data() ),
4477  reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
4478  }
4479 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4480 
4481  template <typename Dispatch>
4483  uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4484  {
4485  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4486  d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
4487  }
4488 
4489  template <typename Dispatch>
4491  uint32_t instanceCount,
4492  uint32_t firstIndex,
4493  int32_t vertexOffset,
4494  uint32_t firstInstance,
4495  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4496  {
4497  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4498  d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
4499  }
4500 
4501  template <typename Dispatch>
4504  uint32_t drawCount,
4505  uint32_t stride,
4506  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4507  {
4508  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4509  d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
4510  }
4511 
4512  template <typename Dispatch>
4515  uint32_t drawCount,
4516  uint32_t stride,
4517  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4518  {
4519  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4520  d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
4521  }
4522 
4523  template <typename Dispatch>
4524  VULKAN_HPP_INLINE void
4525  CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4526  {
4527  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4528  d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
4529  }
4530 
4531  template <typename Dispatch>
4534  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4535  {
4536  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4537  d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
4538  }
4539 
4540  template <typename Dispatch>
4542  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
4543  uint32_t regionCount,
4544  const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
4545  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4546  {
4547  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4548  d.vkCmdCopyBuffer( m_commandBuffer,
4549  static_cast<VkBuffer>( srcBuffer ),
4550  static_cast<VkBuffer>( dstBuffer ),
4551  regionCount,
4552  reinterpret_cast<const VkBufferCopy *>( pRegions ) );
4553  }
4554 
4555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4556  template <typename Dispatch>
4558  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
4560  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4561  {
4562  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4563 
4564  d.vkCmdCopyBuffer( m_commandBuffer,
4565  static_cast<VkBuffer>( srcBuffer ),
4566  static_cast<VkBuffer>( dstBuffer ),
4567  regions.size(),
4568  reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
4569  }
4570 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4571 
4572  template <typename Dispatch>
4574  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4575  VULKAN_HPP_NAMESPACE::Image dstImage,
4576  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4577  uint32_t regionCount,
4578  const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
4579  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4580  {
4581  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4582  d.vkCmdCopyImage( m_commandBuffer,
4583  static_cast<VkImage>( srcImage ),
4584  static_cast<VkImageLayout>( srcImageLayout ),
4585  static_cast<VkImage>( dstImage ),
4586  static_cast<VkImageLayout>( dstImageLayout ),
4587  regionCount,
4588  reinterpret_cast<const VkImageCopy *>( pRegions ) );
4589  }
4590 
4591 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4592  template <typename Dispatch>
4594  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4595  VULKAN_HPP_NAMESPACE::Image dstImage,
4596  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4598  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4599  {
4600  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4601 
4602  d.vkCmdCopyImage( m_commandBuffer,
4603  static_cast<VkImage>( srcImage ),
4604  static_cast<VkImageLayout>( srcImageLayout ),
4605  static_cast<VkImage>( dstImage ),
4606  static_cast<VkImageLayout>( dstImageLayout ),
4607  regions.size(),
4608  reinterpret_cast<const VkImageCopy *>( regions.data() ) );
4609  }
4610 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4611 
4612  template <typename Dispatch>
4614  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4615  VULKAN_HPP_NAMESPACE::Image dstImage,
4616  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4617  uint32_t regionCount,
4618  const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
4620  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4621  {
4622  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4623  d.vkCmdBlitImage( m_commandBuffer,
4624  static_cast<VkImage>( srcImage ),
4625  static_cast<VkImageLayout>( srcImageLayout ),
4626  static_cast<VkImage>( dstImage ),
4627  static_cast<VkImageLayout>( dstImageLayout ),
4628  regionCount,
4629  reinterpret_cast<const VkImageBlit *>( pRegions ),
4630  static_cast<VkFilter>( filter ) );
4631  }
4632 
4633 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4634  template <typename Dispatch>
4636  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4637  VULKAN_HPP_NAMESPACE::Image dstImage,
4638  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4641  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4642  {
4643  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4644 
4645  d.vkCmdBlitImage( m_commandBuffer,
4646  static_cast<VkImage>( srcImage ),
4647  static_cast<VkImageLayout>( srcImageLayout ),
4648  static_cast<VkImage>( dstImage ),
4649  static_cast<VkImageLayout>( dstImageLayout ),
4650  regions.size(),
4651  reinterpret_cast<const VkImageBlit *>( regions.data() ),
4652  static_cast<VkFilter>( filter ) );
4653  }
4654 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4655 
4656  template <typename Dispatch>
4658  VULKAN_HPP_NAMESPACE::Image dstImage,
4659  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4660  uint32_t regionCount,
4661  const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
4662  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4663  {
4664  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4665  d.vkCmdCopyBufferToImage( m_commandBuffer,
4666  static_cast<VkBuffer>( srcBuffer ),
4667  static_cast<VkImage>( dstImage ),
4668  static_cast<VkImageLayout>( dstImageLayout ),
4669  regionCount,
4670  reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
4671  }
4672 
4673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4674  template <typename Dispatch>
4676  VULKAN_HPP_NAMESPACE::Image dstImage,
4677  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4679  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4680  {
4681  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4682 
4683  d.vkCmdCopyBufferToImage( m_commandBuffer,
4684  static_cast<VkBuffer>( srcBuffer ),
4685  static_cast<VkImage>( dstImage ),
4686  static_cast<VkImageLayout>( dstImageLayout ),
4687  regions.size(),
4688  reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
4689  }
4690 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4691 
4692  template <typename Dispatch>
4694  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4695  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
4696  uint32_t regionCount,
4697  const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
4698  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4699  {
4700  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4701  d.vkCmdCopyImageToBuffer( m_commandBuffer,
4702  static_cast<VkImage>( srcImage ),
4703  static_cast<VkImageLayout>( srcImageLayout ),
4704  static_cast<VkBuffer>( dstBuffer ),
4705  regionCount,
4706  reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
4707  }
4708 
4709 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4710  template <typename Dispatch>
4712  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4713  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
4715  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4716  {
4717  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4718 
4719  d.vkCmdCopyImageToBuffer( m_commandBuffer,
4720  static_cast<VkImage>( srcImage ),
4721  static_cast<VkImageLayout>( srcImageLayout ),
4722  static_cast<VkBuffer>( dstBuffer ),
4723  regions.size(),
4724  reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
4725  }
4726 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4727 
4728  template <typename Dispatch>
4732  const void * pData,
4733  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4734  {
4735  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4736  d.vkCmdUpdateBuffer(
4737  m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
4738  }
4739 
4740 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4741  template <typename DataType, typename Dispatch>
4745  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4746  {
4747  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4748 
4749  d.vkCmdUpdateBuffer( m_commandBuffer,
4750  static_cast<VkBuffer>( dstBuffer ),
4751  static_cast<VkDeviceSize>( dstOffset ),
4752  data.size() * sizeof( DataType ),
4753  reinterpret_cast<const void *>( data.data() ) );
4754  }
4755 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4756 
4757  template <typename Dispatch>
4761  uint32_t data,
4762  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4763  {
4764  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4765  d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
4766  }
4767 
4768  template <typename Dispatch>
4772  uint32_t rangeCount,
4774  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4775  {
4776  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4777  d.vkCmdClearColorImage( m_commandBuffer,
4778  static_cast<VkImage>( image ),
4779  static_cast<VkImageLayout>( imageLayout ),
4780  reinterpret_cast<const VkClearColorValue *>( pColor ),
4781  rangeCount,
4782  reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
4783  }
4784 
4785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4786  template <typename Dispatch>
4791  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4792  {
4793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4794 
4795  d.vkCmdClearColorImage( m_commandBuffer,
4796  static_cast<VkImage>( image ),
4797  static_cast<VkImageLayout>( imageLayout ),
4798  reinterpret_cast<const VkClearColorValue *>( &color ),
4799  ranges.size(),
4800  reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
4801  }
4802 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4803 
4804  template <typename Dispatch>
4807  const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
4808  uint32_t rangeCount,
4810  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4811  {
4812  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4813  d.vkCmdClearDepthStencilImage( m_commandBuffer,
4814  static_cast<VkImage>( image ),
4815  static_cast<VkImageLayout>( imageLayout ),
4816  reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
4817  rangeCount,
4818  reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
4819  }
4820 
4821 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4822  template <typename Dispatch>
4823  VULKAN_HPP_INLINE void
4826  const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
4828  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4829  {
4830  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4831 
4832  d.vkCmdClearDepthStencilImage( m_commandBuffer,
4833  static_cast<VkImage>( image ),
4834  static_cast<VkImageLayout>( imageLayout ),
4835  reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
4836  ranges.size(),
4837  reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
4838  }
4839 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4840 
4841  template <typename Dispatch>
4842  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
4843  const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
4844  uint32_t rectCount,
4845  const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
4846  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4847  {
4848  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4849  d.vkCmdClearAttachments( m_commandBuffer,
4850  attachmentCount,
4851  reinterpret_cast<const VkClearAttachment *>( pAttachments ),
4852  rectCount,
4853  reinterpret_cast<const VkClearRect *>( pRects ) );
4854  }
4855 
4856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4857  template <typename Dispatch>
4860  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4861  {
4862  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4863 
4864  d.vkCmdClearAttachments( m_commandBuffer,
4865  attachments.size(),
4866  reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
4867  rects.size(),
4868  reinterpret_cast<const VkClearRect *>( rects.data() ) );
4869  }
4870 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4871 
4872  template <typename Dispatch>
4874  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4875  VULKAN_HPP_NAMESPACE::Image dstImage,
4876  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4877  uint32_t regionCount,
4878  const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
4879  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4880  {
4881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4882  d.vkCmdResolveImage( m_commandBuffer,
4883  static_cast<VkImage>( srcImage ),
4884  static_cast<VkImageLayout>( srcImageLayout ),
4885  static_cast<VkImage>( dstImage ),
4886  static_cast<VkImageLayout>( dstImageLayout ),
4887  regionCount,
4888  reinterpret_cast<const VkImageResolve *>( pRegions ) );
4889  }
4890 
4891 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4892  template <typename Dispatch>
4894  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
4895  VULKAN_HPP_NAMESPACE::Image dstImage,
4896  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
4898  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4899  {
4900  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4901 
4902  d.vkCmdResolveImage( m_commandBuffer,
4903  static_cast<VkImage>( srcImage ),
4904  static_cast<VkImageLayout>( srcImageLayout ),
4905  static_cast<VkImage>( dstImage ),
4906  static_cast<VkImageLayout>( dstImageLayout ),
4907  regions.size(),
4908  reinterpret_cast<const VkImageResolve *>( regions.data() ) );
4909  }
4910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4911 
4912  template <typename Dispatch>
4915  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4916  {
4917  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4918  d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
4919  }
4920 
4921  template <typename Dispatch>
4924  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4925  {
4926  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4927  d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
4928  }
4929 
4930  template <typename Dispatch>
4932  const VULKAN_HPP_NAMESPACE::Event * pEvents,
4935  uint32_t memoryBarrierCount,
4936  const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
4937  uint32_t bufferMemoryBarrierCount,
4938  const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
4939  uint32_t imageMemoryBarrierCount,
4940  const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
4941  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4942  {
4943  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4944  d.vkCmdWaitEvents( m_commandBuffer,
4945  eventCount,
4946  reinterpret_cast<const VkEvent *>( pEvents ),
4947  static_cast<VkPipelineStageFlags>( srcStageMask ),
4948  static_cast<VkPipelineStageFlags>( dstStageMask ),
4949  memoryBarrierCount,
4950  reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
4951  bufferMemoryBarrierCount,
4952  reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
4953  imageMemoryBarrierCount,
4954  reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
4955  }
4956 
4957 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4958  template <typename Dispatch>
4959  VULKAN_HPP_INLINE void
4966  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4967  {
4968  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4969 
4970  d.vkCmdWaitEvents( m_commandBuffer,
4971  events.size(),
4972  reinterpret_cast<const VkEvent *>( events.data() ),
4973  static_cast<VkPipelineStageFlags>( srcStageMask ),
4974  static_cast<VkPipelineStageFlags>( dstStageMask ),
4975  memoryBarriers.size(),
4976  reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
4977  bufferMemoryBarriers.size(),
4978  reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
4979  imageMemoryBarriers.size(),
4980  reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
4981  }
4982 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4983 
4984  template <typename Dispatch>
4987  VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
4988  uint32_t memoryBarrierCount,
4989  const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
4990  uint32_t bufferMemoryBarrierCount,
4991  const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
4992  uint32_t imageMemoryBarrierCount,
4993  const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
4994  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4995  {
4996  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4997  d.vkCmdPipelineBarrier( m_commandBuffer,
4998  static_cast<VkPipelineStageFlags>( srcStageMask ),
4999  static_cast<VkPipelineStageFlags>( dstStageMask ),
5000  static_cast<VkDependencyFlags>( dependencyFlags ),
5001  memoryBarrierCount,
5002  reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
5003  bufferMemoryBarrierCount,
5004  reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
5005  imageMemoryBarrierCount,
5006  reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
5007  }
5008 
5009 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5010  template <typename Dispatch>
5011  VULKAN_HPP_INLINE void
5014  VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
5018  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5019  {
5020  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5021 
5022  d.vkCmdPipelineBarrier( m_commandBuffer,
5023  static_cast<VkPipelineStageFlags>( srcStageMask ),
5024  static_cast<VkPipelineStageFlags>( dstStageMask ),
5025  static_cast<VkDependencyFlags>( dependencyFlags ),
5026  memoryBarriers.size(),
5027  reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
5028  bufferMemoryBarriers.size(),
5029  reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
5030  imageMemoryBarriers.size(),
5031  reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
5032  }
5033 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5034 
5035  template <typename Dispatch>
5037  uint32_t query,
5039  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5040  {
5041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5042  d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
5043  }
5044 
5045  template <typename Dispatch>
5047  {
5048  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5049  d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
5050  }
5051 
5052  template <typename Dispatch>
5054  uint32_t firstQuery,
5055  uint32_t queryCount,
5056  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5057  {
5058  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5059  d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
5060  }
5061 
5062  template <typename Dispatch>
5065  uint32_t query,
5066  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5067  {
5068  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5069  d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
5070  }
5071 
5072  template <typename Dispatch>
5074  uint32_t firstQuery,
5075  uint32_t queryCount,
5076  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
5080  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5081  {
5082  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5083  d.vkCmdCopyQueryPoolResults( m_commandBuffer,
5084  static_cast<VkQueryPool>( queryPool ),
5085  firstQuery,
5086  queryCount,
5087  static_cast<VkBuffer>( dstBuffer ),
5088  static_cast<VkDeviceSize>( dstOffset ),
5089  static_cast<VkDeviceSize>( stride ),
5090  static_cast<VkQueryResultFlags>( flags ) );
5091  }
5092 
5093  template <typename Dispatch>
5096  uint32_t offset,
5097  uint32_t size,
5098  const void * pValues,
5099  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5100  {
5101  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5102  d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
5103  }
5104 
5105 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5106  template <typename ValuesType, typename Dispatch>
5109  uint32_t offset,
5111  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5112  {
5113  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5114 
5115  d.vkCmdPushConstants( m_commandBuffer,
5116  static_cast<VkPipelineLayout>( layout ),
5117  static_cast<VkShaderStageFlags>( stageFlags ),
5118  offset,
5119  values.size() * sizeof( ValuesType ),
5120  reinterpret_cast<const void *>( values.data() ) );
5121  }
5122 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5123 
5124  template <typename Dispatch>
5127  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5128  {
5129  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5130  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5131  }
5132 
5133 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5134  template <typename Dispatch>
5137  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5138  {
5139  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5140 
5141  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5142  }
5143 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5144 
5145  template <typename Dispatch>
5147  {
5148  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5149  d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
5150  }
5151 
5152  template <typename Dispatch>
5154  {
5155  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5156  d.vkCmdEndRenderPass( m_commandBuffer );
5157  }
5158 
5159  template <typename Dispatch>
5160  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
5161  const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
5162  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5163  {
5164  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5165  d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
5166  }
5167 
5168 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5169  template <typename Dispatch>
5171  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5172  {
5173  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5174 
5175  d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
5176  }
5177 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5178 
5179  //=== VK_VERSION_1_1 ===
5180 
5181  template <typename Dispatch>
5183  {
5184  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5185  return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
5186  }
5187 
5188 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5189  template <typename Dispatch>
5191  {
5192  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5193 
5194  uint32_t apiVersion;
5195  VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
5196  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
5197 
5198  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
5199  }
5200 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5201 
5202  template <typename Dispatch>
5204  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
5205  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5206  {
5207  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5208  return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
5209  }
5210 
5211 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5212  template <typename Dispatch>
5215  {
5216  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5217 
5218  VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
5219  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
5220 
5221  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
5222  }
5223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5224 
5225  template <typename Dispatch>
5227  const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
5228  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5229  {
5230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5231  return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
5232  }
5233 
5234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5235  template <typename Dispatch>
5238  {
5239  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5240 
5241  VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
5242  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
5243 
5244  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
5245  }
5246 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5247 
5248  template <typename Dispatch>
5250  uint32_t localDeviceIndex,
5251  uint32_t remoteDeviceIndex,
5252  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
5253  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5254  {
5255  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5256  d.vkGetDeviceGroupPeerMemoryFeatures(
5257  m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
5258  }
5259 
5260 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5261  template <typename Dispatch>
5263  uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5264  {
5265  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5266 
5268  d.vkGetDeviceGroupPeerMemoryFeatures(
5269  m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
5270 
5271  return peerMemoryFeatures;
5272  }
5273 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5274 
5275  template <typename Dispatch>
5276  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5277  {
5278  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5279  d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
5280  }
5281 
5282  template <typename Dispatch>
5284  uint32_t baseGroupY,
5285  uint32_t baseGroupZ,
5286  uint32_t groupCountX,
5287  uint32_t groupCountY,
5288  uint32_t groupCountZ,
5289  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5290  {
5291  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5292  d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
5293  }
5294 
5295  template <typename Dispatch>
5297  Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount,
5298  VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
5299  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5300  {
5301  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5302  return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
5303  m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
5304  }
5305 
5306 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5307  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
5310  Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
5311  {
5312  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5313 
5314  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
5315  uint32_t physicalDeviceGroupCount;
5316  VkResult result;
5317  do
5318  {
5319  result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
5320  if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
5321  {
5322  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5323  result = d.vkEnumeratePhysicalDeviceGroups(
5324  m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
5325  }
5326  } while ( result == VK_INCOMPLETE );
5327  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
5328  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5329  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5330  {
5331  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5332  }
5333  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
5334  }
5335 
5336  template <typename PhysicalDeviceGroupPropertiesAllocator,
5337  typename Dispatch,
5338  typename B1,
5342  Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
5343  {
5344  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5345 
5346  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
5347  physicalDeviceGroupPropertiesAllocator );
5348  uint32_t physicalDeviceGroupCount;
5349  VkResult result;
5350  do
5351  {
5352  result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
5353  if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
5354  {
5355  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5356  result = d.vkEnumeratePhysicalDeviceGroups(
5357  m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
5358  }
5359  } while ( result == VK_INCOMPLETE );
5360  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
5361  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5362  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5363  {
5364  physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5365  }
5366  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
5367  }
5368 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5369 
5370  template <typename Dispatch>
5372  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
5373  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5374  {
5375  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5376  d.vkGetImageMemoryRequirements2(
5377  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5378  }
5379 
5380 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5381  template <typename Dispatch>
5384  {
5385  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5386 
5387  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
5388  d.vkGetImageMemoryRequirements2(
5389  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5390 
5391  return memoryRequirements;
5392  }
5393 
5394  template <typename X, typename Y, typename... Z, typename Dispatch>
5397  {
5398  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5399 
5400  StructureChain<X, Y, Z...> structureChain;
5401  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
5402  d.vkGetImageMemoryRequirements2(
5403  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5404 
5405  return structureChain;
5406  }
5407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5408 
5409  template <typename Dispatch>
5411  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
5412  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5413  {
5414  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5415  d.vkGetBufferMemoryRequirements2(
5416  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5417  }
5418 
5419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5420  template <typename Dispatch>
5423  {
5424  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5425 
5426  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
5427  d.vkGetBufferMemoryRequirements2(
5428  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5429 
5430  return memoryRequirements;
5431  }
5432 
5433  template <typename X, typename Y, typename... Z, typename Dispatch>
5436  {
5437  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5438 
5439  StructureChain<X, Y, Z...> structureChain;
5440  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
5441  d.vkGetBufferMemoryRequirements2(
5442  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5443 
5444  return structureChain;
5445  }
5446 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5447 
5448  template <typename Dispatch>
5450  uint32_t * pSparseMemoryRequirementCount,
5451  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
5452  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5453  {
5454  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5455  d.vkGetImageSparseMemoryRequirements2( m_device,
5456  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
5457  pSparseMemoryRequirementCount,
5458  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
5459  }
5460 
5461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5462  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
5463  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
5465  {
5466  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5467 
5468  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
5469  uint32_t sparseMemoryRequirementCount;
5470  d.vkGetImageSparseMemoryRequirements2(
5471  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
5472  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5473  d.vkGetImageSparseMemoryRequirements2( m_device,
5474  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
5475  &sparseMemoryRequirementCount,
5476  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
5477 
5478  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
5479  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
5480  {
5481  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5482  }
5483  return sparseMemoryRequirements;
5484  }
5485 
5486  template <typename SparseImageMemoryRequirements2Allocator,
5487  typename Dispatch,
5488  typename B1,
5490  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
5492  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
5493  Dispatch const & d ) const
5494  {
5495  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5496 
5497  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
5498  sparseImageMemoryRequirements2Allocator );
5499  uint32_t sparseMemoryRequirementCount;
5500  d.vkGetImageSparseMemoryRequirements2(
5501  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
5502  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5503  d.vkGetImageSparseMemoryRequirements2( m_device,
5504  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
5505  &sparseMemoryRequirementCount,
5506  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
5507 
5508  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
5509  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
5510  {
5511  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5512  }
5513  return sparseMemoryRequirements;
5514  }
5515 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5516 
5517  template <typename Dispatch>
5519  {
5520  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5521  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
5522  }
5523 
5524 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5525  template <typename Dispatch>
5528  {
5529  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5530 
5532  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
5533 
5534  return features;
5535  }
5536 
5537  template <typename X, typename Y, typename... Z, typename Dispatch>
5539  {
5540  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5541 
5542  StructureChain<X, Y, Z...> structureChain;
5543  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
5544  d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
5545 
5546  return structureChain;
5547  }
5548 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5549 
5550  template <typename Dispatch>
5552  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5553  {
5554  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5555  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
5556  }
5557 
5558 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5559  template <typename Dispatch>
5562  {
5563  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5564 
5566  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
5567 
5568  return properties;
5569  }
5570 
5571  template <typename X, typename Y, typename... Z, typename Dispatch>
5573  {
5574  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5575 
5576  StructureChain<X, Y, Z...> structureChain;
5577  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
5578  d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
5579 
5580  return structureChain;
5581  }
5582 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5583 
5584  template <typename Dispatch>
5586  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
5587  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5588  {
5589  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5590  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
5591  }
5592 
5593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5594  template <typename Dispatch>
5597  {
5598  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5599 
5600  VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
5601  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
5602 
5603  return formatProperties;
5604  }
5605 
5606  template <typename X, typename Y, typename... Z, typename Dispatch>
5608  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5609  {
5610  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5611 
5612  StructureChain<X, Y, Z...> structureChain;
5613  VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
5614  d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
5615 
5616  return structureChain;
5617  }
5618 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5619 
5620  template <typename Dispatch>
5623  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
5624  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5625  {
5626  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5627  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5628  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
5629  reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
5630  }
5631 
5632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5633  template <typename Dispatch>
5636  {
5637  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5638 
5639  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
5640  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5641  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
5642  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
5643  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
5644 
5645  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
5646  }
5647 
5648  template <typename X, typename Y, typename... Z, typename Dispatch>
5651  {
5652  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5653 
5654  StructureChain<X, Y, Z...> structureChain;
5655  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
5656  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5657  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
5658  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
5659  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
5660 
5661  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
5662  }
5663 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5664 
5665  template <typename Dispatch>
5666  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
5667  VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
5668  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5669  {
5670  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5671  d.vkGetPhysicalDeviceQueueFamilyProperties2(
5672  m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
5673  }
5674 
5675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5676  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
5677  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
5678  PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
5679  {
5680  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5681 
5682  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
5683  uint32_t queueFamilyPropertyCount;
5684  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
5685  queueFamilyProperties.resize( queueFamilyPropertyCount );
5686  d.vkGetPhysicalDeviceQueueFamilyProperties2(
5687  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5688 
5689  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
5690  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5691  {
5692  queueFamilyProperties.resize( queueFamilyPropertyCount );
5693  }
5694  return queueFamilyProperties;
5695  }
5696 
5697  template <typename QueueFamilyProperties2Allocator,
5698  typename Dispatch,
5699  typename B1,
5701  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
5702  PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
5703  {
5704  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5705 
5706  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
5707  uint32_t queueFamilyPropertyCount;
5708  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
5709  queueFamilyProperties.resize( queueFamilyPropertyCount );
5710  d.vkGetPhysicalDeviceQueueFamilyProperties2(
5711  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5712 
5713  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
5714  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5715  {
5716  queueFamilyProperties.resize( queueFamilyPropertyCount );
5717  }
5718  return queueFamilyProperties;
5719  }
5720 
5721  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
5722  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
5723  PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
5724  {
5725  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5726 
5727  std::vector<StructureChain, StructureChainAllocator> structureChains;
5728  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
5729  uint32_t queueFamilyPropertyCount;
5730  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
5731  structureChains.resize( queueFamilyPropertyCount );
5732  queueFamilyProperties.resize( queueFamilyPropertyCount );
5733  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5734  {
5735  queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
5736  }
5737  d.vkGetPhysicalDeviceQueueFamilyProperties2(
5738  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5739 
5740  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
5741  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5742  {
5743  structureChains.resize( queueFamilyPropertyCount );
5744  }
5745  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5746  {
5747  structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
5748  }
5749  return structureChains;
5750  }
5751 
5752  template <typename StructureChain,
5753  typename StructureChainAllocator,
5754  typename Dispatch,
5755  typename B1,
5757  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
5758  PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
5759  {
5760  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5761 
5762  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
5763  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
5764  uint32_t queueFamilyPropertyCount;
5765  d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
5766  structureChains.resize( queueFamilyPropertyCount );
5767  queueFamilyProperties.resize( queueFamilyPropertyCount );
5768  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5769  {
5770  queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
5771  }
5772  d.vkGetPhysicalDeviceQueueFamilyProperties2(
5773  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5774 
5775  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
5776  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5777  {
5778  structureChains.resize( queueFamilyPropertyCount );
5779  }
5780  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5781  {
5782  structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
5783  }
5784  return structureChains;
5785  }
5786 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5787 
5788  template <typename Dispatch>
5790  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5791  {
5792  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5793  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
5794  }
5795 
5796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5797  template <typename Dispatch>
5800  {
5801  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5802 
5804  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
5805 
5806  return memoryProperties;
5807  }
5808 
5809  template <typename X, typename Y, typename... Z, typename Dispatch>
5811  {
5812  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5813 
5814  StructureChain<X, Y, Z...> structureChain;
5816  structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
5817  d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
5818 
5819  return structureChain;
5820  }
5821 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5822 
5823  template <typename Dispatch>
5825  uint32_t * pPropertyCount,
5827  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5828  {
5829  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5830  d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5831  reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
5832  pPropertyCount,
5833  reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
5834  }
5835 
5836 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5837  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
5838  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
5840  {
5841  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5842 
5843  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
5844  uint32_t propertyCount;
5845  d.vkGetPhysicalDeviceSparseImageFormatProperties2(
5846  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
5847  properties.resize( propertyCount );
5848  d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5849  reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
5850  &propertyCount,
5851  reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
5852 
5853  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
5854  if ( propertyCount < properties.size() )
5855  {
5856  properties.resize( propertyCount );
5857  }
5858  return properties;
5859  }
5860 
5861  template <typename SparseImageFormatProperties2Allocator,
5862  typename Dispatch,
5863  typename B1,
5865  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
5867  SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
5868  Dispatch const & d ) const
5869  {
5870  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5871 
5872  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
5873  uint32_t propertyCount;
5874  d.vkGetPhysicalDeviceSparseImageFormatProperties2(
5875  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
5876  properties.resize( propertyCount );
5877  d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5878  reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
5879  &propertyCount,
5880  reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
5881 
5882  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
5883  if ( propertyCount < properties.size() )
5884  {
5885  properties.resize( propertyCount );
5886  }
5887  return properties;
5888  }
5889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5890 
5891  template <typename Dispatch>
5894  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5895  {
5896  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5897  d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
5898  }
5899 
5900  template <typename Dispatch>
5902  VULKAN_HPP_NAMESPACE::Queue * pQueue,
5903  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5904  {
5905  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5906  d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
5907  }
5908 
5909 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5910  template <typename Dispatch>
5912  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5913  {
5914  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5915 
5917  d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
5918 
5919  return queue;
5920  }
5921 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5922 
5923  template <typename Dispatch>
5926  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
5928  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5929  {
5930  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5931  return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
5932  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
5933  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
5934  reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
5935  }
5936 
5937 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5938  template <typename Dispatch>
5942  Dispatch const & d ) const
5943  {
5944  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5945 
5947  VkResult result = d.vkCreateSamplerYcbcrConversion(
5948  m_device,
5949  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
5950  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
5951  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
5952  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
5953 
5954  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
5955  }
5956 
5957 # ifndef VULKAN_HPP_NO_SMART_HANDLE
5958  template <typename Dispatch>
5962  Dispatch const & d ) const
5963  {
5964  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5965 
5967  VkResult result = d.vkCreateSamplerYcbcrConversion(
5968  m_device,
5969  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
5970  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
5971  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
5972  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
5973 
5974  return createResultValueType(
5975  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
5977  }
5978 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
5979 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5980 
5981  template <typename Dispatch>
5983  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
5984  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5985  {
5986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5987  d.vkDestroySamplerYcbcrConversion(
5988  m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
5989  }
5990 
5991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5992  template <typename Dispatch>
5995  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5996  {
5997  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5998 
5999  d.vkDestroySamplerYcbcrConversion(
6000  m_device,
6001  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6002  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6003  }
6004 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6005 
6006  template <typename Dispatch>
6008  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6009  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6010  {
6011  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6012  d.vkDestroySamplerYcbcrConversion(
6013  m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6014  }
6015 
6016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6017  template <typename Dispatch>
6020  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6021  {
6022  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6023 
6024  d.vkDestroySamplerYcbcrConversion(
6025  m_device,
6026  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6027  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6028  }
6029 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6030 
6031  template <typename Dispatch>
6034  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6035  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
6036  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6037  {
6038  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6039  return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
6040  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
6041  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6042  reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
6043  }
6044 
6045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6046  template <typename Dispatch>
6050  Dispatch const & d ) const
6051  {
6052  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6053 
6054  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
6055  VkResult result = d.vkCreateDescriptorUpdateTemplate(
6056  m_device,
6057  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6058  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6059  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
6060  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
6061 
6062  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
6063  }
6064 
6065 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6066  template <typename Dispatch>
6070  Dispatch const & d ) const
6071  {
6072  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6073 
6074  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
6075  VkResult result = d.vkCreateDescriptorUpdateTemplate(
6076  m_device,
6077  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6078  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6079  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
6080  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
6081 
6082  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6084  descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
6085  }
6086 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
6087 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6088 
6089  template <typename Dispatch>
6091  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6092  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6093  {
6094  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6095  d.vkDestroyDescriptorUpdateTemplate(
6096  m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6097  }
6098 
6099 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6100  template <typename Dispatch>
6103  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6104  {
6105  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6106 
6107  d.vkDestroyDescriptorUpdateTemplate(
6108  m_device,
6109  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6110  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6111  }
6112 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6113 
6114  template <typename Dispatch>
6116  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6117  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6118  {
6119  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6120  d.vkDestroyDescriptorUpdateTemplate(
6121  m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6122  }
6123 
6124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6125  template <typename Dispatch>
6128  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6129  {
6130  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6131 
6132  d.vkDestroyDescriptorUpdateTemplate(
6133  m_device,
6134  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6135  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6136  }
6137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6138 
6139  template <typename Dispatch>
6141  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
6142  const void * pData,
6143  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6144  {
6145  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6146  d.vkUpdateDescriptorSetWithTemplate(
6147  m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
6148  }
6149 
6150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6151  template <typename DataType, typename Dispatch>
6153  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
6154  DataType const & data,
6155  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6156  {
6157  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6158 
6159  d.vkUpdateDescriptorSetWithTemplate( m_device,
6160  static_cast<VkDescriptorSet>( descriptorSet ),
6161  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6162  reinterpret_cast<const void *>( &data ) );
6163  }
6164 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6165 
6166  template <typename Dispatch>
6168  VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
6169  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6170  {
6171  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6172  d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
6173  reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
6174  reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
6175  }
6176 
6177 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6178  template <typename Dispatch>
6181  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6182  {
6183  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6184 
6185  VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
6186  d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
6187  reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
6188  reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
6189 
6190  return externalBufferProperties;
6191  }
6192 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6193 
6194  template <typename Dispatch>
6196  VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
6197  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6198  {
6199  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6200  d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
6201  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
6202  reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
6203  }
6204 
6205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6206  template <typename Dispatch>
6209  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6210  {
6211  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6212 
6213  VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
6214  d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
6215  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
6216  reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
6217 
6218  return externalFenceProperties;
6219  }
6220 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6221 
6222  template <typename Dispatch>
6223  VULKAN_HPP_INLINE void
6225  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
6226  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6227  {
6228  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6229  d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
6230  reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
6231  reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
6232  }
6233 
6234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6235  template <typename Dispatch>
6238  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6239  {
6240  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6241 
6242  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
6243  d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
6244  reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
6245  reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
6246 
6247  return externalSemaphoreProperties;
6248  }
6249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6250 
6251  template <typename Dispatch>
6254  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6255  {
6256  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6257  d.vkGetDescriptorSetLayoutSupport(
6258  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
6259  }
6260 
6261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6262  template <typename Dispatch>
6265  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6266  {
6267  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6268 
6270  d.vkGetDescriptorSetLayoutSupport(
6271  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
6272 
6273  return support;
6274  }
6275 
6276  template <typename X, typename Y, typename... Z, typename Dispatch>
6279  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6280  {
6281  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6282 
6283  StructureChain<X, Y, Z...> structureChain;
6284  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
6285  d.vkGetDescriptorSetLayoutSupport(
6286  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
6287 
6288  return structureChain;
6289  }
6290 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6291 
6292  //=== VK_VERSION_1_2 ===
6293 
6294  template <typename Dispatch>
6297  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
6298  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
6299  uint32_t maxDrawCount,
6300  uint32_t stride,
6301  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6302  {
6303  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6304  d.vkCmdDrawIndirectCount( m_commandBuffer,
6305  static_cast<VkBuffer>( buffer ),
6306  static_cast<VkDeviceSize>( offset ),
6307  static_cast<VkBuffer>( countBuffer ),
6308  static_cast<VkDeviceSize>( countBufferOffset ),
6309  maxDrawCount,
6310  stride );
6311  }
6312 
6313  template <typename Dispatch>
6316  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
6317  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
6318  uint32_t maxDrawCount,
6319  uint32_t stride,
6320  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6321  {
6322  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6323  d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
6324  static_cast<VkBuffer>( buffer ),
6325  static_cast<VkDeviceSize>( offset ),
6326  static_cast<VkBuffer>( countBuffer ),
6327  static_cast<VkDeviceSize>( countBufferOffset ),
6328  maxDrawCount,
6329  stride );
6330  }
6331 
6332  template <typename Dispatch>
6334  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6335  VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
6336  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6337  {
6338  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6339  return static_cast<Result>( d.vkCreateRenderPass2( m_device,
6340  reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
6341  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6342  reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
6343  }
6344 
6345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6346  template <typename Dispatch>
6350  Dispatch const & d ) const
6351  {
6352  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6353 
6355  VkResult result =
6356  d.vkCreateRenderPass2( m_device,
6357  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
6358  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6359  reinterpret_cast<VkRenderPass *>( &renderPass ) );
6360  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
6361 
6362  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
6363  }
6364 
6365 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6366  template <typename Dispatch>
6370  Dispatch const & d ) const
6371  {
6372  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6373 
6375  VkResult result =
6376  d.vkCreateRenderPass2( m_device,
6377  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
6378  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6379  reinterpret_cast<VkRenderPass *>( &renderPass ) );
6380  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
6381 
6382  return createResultValueType(
6383  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6385  }
6386 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
6387 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6388 
6389  template <typename Dispatch>
6391  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
6392  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6393  {
6394  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6395  d.vkCmdBeginRenderPass2(
6396  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
6397  }
6398 
6399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6400  template <typename Dispatch>
6402  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
6403  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6404  {
6405  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6406 
6407  d.vkCmdBeginRenderPass2(
6408  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
6409  }
6410 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6411 
6412  template <typename Dispatch>
6414  const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
6415  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6416  {
6417  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6418  d.vkCmdNextSubpass2(
6419  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
6420  }
6421 
6422 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6423  template <typename Dispatch>
6425  const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
6426  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6427  {
6428  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6429 
6430  d.vkCmdNextSubpass2(
6431  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
6432  }
6433 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6434 
6435  template <typename Dispatch>
6437  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6438  {
6439  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6440  d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
6441  }
6442 
6443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6444  template <typename Dispatch>
6446  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6447  {
6448  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6449 
6450  d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
6451  }
6452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6453 
6454  template <typename Dispatch>
6455  VULKAN_HPP_INLINE void
6456  Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6457  {
6458  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6459  d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
6460  }
6461 
6462  template <typename Dispatch>
6464  uint64_t * pValue,
6465  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6466  {
6467  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6468  return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
6469  }
6470 
6471 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6472  template <typename Dispatch>
6474  Dispatch const & d ) const
6475  {
6476  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6477 
6478  uint64_t value;
6479  VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
6480  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
6481 
6482  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
6483  }
6484 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6485 
6486  template <typename Dispatch>
6488  uint64_t timeout,
6489  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6490  {
6491  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6492  return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
6493  }
6494 
6495 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6496  template <typename Dispatch>
6498  Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
6499  {
6500  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6501 
6502  VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
6503  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6504  VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
6506 
6507  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
6508  }
6509 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6510 
6511  template <typename Dispatch>
6513  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6514  {
6515  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6516  return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
6517  }
6518 
6519 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6520  template <typename Dispatch>
6522  Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
6523  {
6524  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6525 
6526  VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
6527  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
6528 
6529  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
6530  }
6531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6532 
6533  template <typename Dispatch>
6535  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6536  {
6537  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6538  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
6539  }
6540 
6541 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6542  template <typename Dispatch>
6544  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6545  {
6546  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6547 
6548  VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
6549 
6550  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
6551  }
6552 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6553 
6554  template <typename Dispatch>
6556  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6557  {
6558  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6559  return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
6560  }
6561 
6562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6563  template <typename Dispatch>
6565  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6566  {
6567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6568 
6569  uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
6570 
6571  return result;
6572  }
6573 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6574 
6575  template <typename Dispatch>
6577  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6578  {
6579  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6580  return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
6581  }
6582 
6583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6584  template <typename Dispatch>
6586  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6587  {
6588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6589 
6590  uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
6591 
6592  return result;
6593  }
6594 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6595 
6596  //=== VK_VERSION_1_3 ===
6597 
6598  template <typename Dispatch>
6601  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6602  {
6603  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6604  return static_cast<Result>(
6605  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
6606  }
6607 
6608 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6609  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
6612  PhysicalDevice::getToolProperties( Dispatch const & d ) const
6613  {
6614  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6615 
6616  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
6617  uint32_t toolCount;
6618  VkResult result;
6619  do
6620  {
6621  result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
6622  if ( ( result == VK_SUCCESS ) && toolCount )
6623  {
6624  toolProperties.resize( toolCount );
6625  result =
6626  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
6627  }
6628  } while ( result == VK_INCOMPLETE );
6629  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
6630  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
6631  if ( toolCount < toolProperties.size() )
6632  {
6633  toolProperties.resize( toolCount );
6634  }
6635  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
6636  }
6637 
6638  template <typename PhysicalDeviceToolPropertiesAllocator,
6639  typename Dispatch,
6640  typename B1,
6644  PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
6645  {
6646  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6647 
6648  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
6649  physicalDeviceToolPropertiesAllocator );
6650  uint32_t toolCount;
6651  VkResult result;
6652  do
6653  {
6654  result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
6655  if ( ( result == VK_SUCCESS ) && toolCount )
6656  {
6657  toolProperties.resize( toolCount );
6658  result =
6659  d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
6660  }
6661  } while ( result == VK_INCOMPLETE );
6662  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
6663  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
6664  if ( toolCount < toolProperties.size() )
6665  {
6666  toolProperties.resize( toolCount );
6667  }
6668  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
6669  }
6670 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6671 
6672  template <typename Dispatch>
6674  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6675  VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
6676  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6677  {
6678  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6679  return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
6680  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
6681  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6682  reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
6683  }
6684 
6685 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6686  template <typename Dispatch>
6690  Dispatch const & d ) const
6691  {
6692  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6693 
6694  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
6695  VkResult result =
6696  d.vkCreatePrivateDataSlot( m_device,
6697  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
6698  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6699  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
6700  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
6701 
6702  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
6703  }
6704 
6705 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6706  template <typename Dispatch>
6710  Dispatch const & d ) const
6711  {
6712  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6713 
6714  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
6715  VkResult result =
6716  d.vkCreatePrivateDataSlot( m_device,
6717  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
6718  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6719  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
6720  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
6721 
6722  return createResultValueType(
6723  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6725  }
6726 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
6727 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6728 
6729  template <typename Dispatch>
6731  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6732  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6733  {
6734  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6735  d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6736  }
6737 
6738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6739  template <typename Dispatch>
6742  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6743  {
6744  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6745 
6746  d.vkDestroyPrivateDataSlot(
6747  m_device,
6748  static_cast<VkPrivateDataSlot>( privateDataSlot ),
6749  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6750  }
6751 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6752 
6753  template <typename Dispatch>
6755  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
6756  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6757  {
6758  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6759  d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6760  }
6761 
6762 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6763  template <typename Dispatch>
6766  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6767  {
6768  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6769 
6770  d.vkDestroyPrivateDataSlot(
6771  m_device,
6772  static_cast<VkPrivateDataSlot>( privateDataSlot ),
6773  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6774  }
6775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6776 
6777 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
6778  template <typename Dispatch>
6780  uint64_t objectHandle,
6781  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
6782  uint64_t data,
6783  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6784  {
6785  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6786  return static_cast<Result>(
6787  d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
6788  }
6789 #else
6790  template <typename Dispatch>
6792  uint64_t objectHandle,
6793  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
6794  uint64_t data,
6795  Dispatch const & d ) const
6796  {
6797  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6798 
6799  VkResult result =
6800  d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
6801  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
6802 
6803  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
6804  }
6805 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6806 
6807  template <typename Dispatch>
6809  uint64_t objectHandle,
6810  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
6811  uint64_t * pData,
6812  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6813  {
6814  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6815  d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
6816  }
6817 
6818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6819  template <typename Dispatch>
6821  uint64_t objectHandle,
6822  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
6823  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6824  {
6825  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6826 
6827  uint64_t data;
6828  d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
6829 
6830  return data;
6831  }
6832 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6833 
6834  template <typename Dispatch>
6836  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
6837  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6838  {
6839  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6840  d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
6841  }
6842 
6843 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6844  template <typename Dispatch>
6846  const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
6847  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6848  {
6849  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6850 
6851  d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
6852  }
6853 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6854 
6855  template <typename Dispatch>
6858  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6859  {
6860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6861  d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
6862  }
6863 
6864  template <typename Dispatch>
6866  const VULKAN_HPP_NAMESPACE::Event * pEvents,
6867  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
6868  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6869  {
6870  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6871  d.vkCmdWaitEvents2(
6872  m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
6873  }
6874 
6875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6876  template <typename Dispatch>
6879  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
6880  {
6881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6882 # ifdef VULKAN_HPP_NO_EXCEPTIONS
6883  VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
6884 # else
6885  if ( events.size() != dependencyInfos.size() )
6886  {
6887  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
6888  }
6889 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
6890 
6891  d.vkCmdWaitEvents2( m_commandBuffer,
6892  events.size(),
6893  reinterpret_cast<const VkEvent *>( events.data() ),
6894  reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
6895  }
6896 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6897 
6898  template <typename Dispatch>
6900  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6901  {
6902  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6903  d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
6904  }
6905 
6906 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6907  template <typename Dispatch>
6909  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6910  {
6911  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6912 
6913  d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
6914  }
6915 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6916 
6917  template <typename Dispatch>
6920  uint32_t query,
6921  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6922  {
6923  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6924  d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
6925  }
6926 
6927  template <typename Dispatch>
6929  const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
6931  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6932  {
6933  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6934  return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
6935  }
6936 
6937 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6938  template <typename Dispatch>
6941  {
6942  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6943 
6944  VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
6945  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
6946 
6947  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
6948  }
6949 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6950 
6951  template <typename Dispatch>
6953  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6954  {
6955  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6956  d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
6957  }
6958 
6959 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6960  template <typename Dispatch>
6962  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6963  {
6964  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6965 
6966  d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
6967  }
6968 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6969 
6970  template <typename Dispatch>
6972  {
6973  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6974  d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
6975  }
6976 
6977 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6978  template <typename Dispatch>
6980  {
6981  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6982 
6983  d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
6984  }
6985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6986 
6987  template <typename Dispatch>
6989  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6990  {
6991  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6992  d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
6993  }
6994 
6995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6996  template <typename Dispatch>
6998  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6999  {
7000  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7001 
7002  d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
7003  }
7004 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7005 
7006  template <typename Dispatch>
7008  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7009  {
7010  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7011  d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
7012  }
7013 
7014 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7015  template <typename Dispatch>
7017  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7018  {
7019  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7020 
7021  d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
7022  }
7023 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7024 
7025  template <typename Dispatch>
7027  {
7028  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7029  d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
7030  }
7031 
7032 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7033  template <typename Dispatch>
7035  {
7036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7037 
7038  d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
7039  }
7040 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7041 
7042  template <typename Dispatch>
7044  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7045  {
7046  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7047  d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
7048  }
7049 
7050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7051  template <typename Dispatch>
7053  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7054  {
7055  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7056 
7057  d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
7058  }
7059 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7060 
7061  template <typename Dispatch>
7063  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7064  {
7065  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7066  d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
7067  }
7068 
7069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7070  template <typename Dispatch>
7072  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7073  {
7074  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7075 
7076  d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
7077  }
7078 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7079 
7080  template <typename Dispatch>
7082  {
7083  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7084  d.vkCmdEndRendering( m_commandBuffer );
7085  }
7086 
7087  template <typename Dispatch>
7089  {
7090  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7091  d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
7092  }
7093 
7094  template <typename Dispatch>
7096  {
7097  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7098  d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
7099  }
7100 
7101  template <typename Dispatch>
7103  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7104  {
7105  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7106  d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
7107  }
7108 
7109  template <typename Dispatch>
7111  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
7112  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7113  {
7114  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7115  d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
7116  }
7117 
7118 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7119  template <typename Dispatch>
7121  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7122  {
7123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7124 
7125  d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
7126  }
7127 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7128 
7129  template <typename Dispatch>
7130  VULKAN_HPP_INLINE void
7131  CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7132  {
7133  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7134  d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
7135  }
7136 
7137 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7138  template <typename Dispatch>
7140  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7141  {
7142  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7143 
7144  d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
7145  }
7146 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7147 
7148  template <typename Dispatch>
7150  uint32_t bindingCount,
7151  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
7152  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
7153  const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
7154  const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
7155  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7156  {
7157  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7158  d.vkCmdBindVertexBuffers2( m_commandBuffer,
7159  firstBinding,
7160  bindingCount,
7161  reinterpret_cast<const VkBuffer *>( pBuffers ),
7162  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
7163  reinterpret_cast<const VkDeviceSize *>( pSizes ),
7164  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
7165  }
7166 
7167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7168  template <typename Dispatch>
7174  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
7175  {
7176  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7177 # ifdef VULKAN_HPP_NO_EXCEPTIONS
7178  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
7179  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
7180  VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
7181 # else
7182  if ( buffers.size() != offsets.size() )
7183  {
7184  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
7185  }
7186  if ( !sizes.empty() && buffers.size() != sizes.size() )
7187  {
7188  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
7189  }
7190  if ( !strides.empty() && buffers.size() != strides.size() )
7191  {
7192  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
7193  }
7194 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
7195 
7196  d.vkCmdBindVertexBuffers2( m_commandBuffer,
7197  firstBinding,
7198  buffers.size(),
7199  reinterpret_cast<const VkBuffer *>( buffers.data() ),
7200  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
7201  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
7202  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
7203  }
7204 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7205 
// Vulkan 1.3 dynamic-state setters. Each is a thin wrapper that asserts the
// dispatcher matches the compiled VK_HEADER_VERSION and forwards to the C
// entry point, casting the C++ enum/Bool32 wrappers to their Vk* equivalents.
// NOTE(review): Doxygen extraction dropped individual signature lines (the
// embedded numbering skips, e.g. 7206 -> 7208); restore from the generated
// header before compiling.

// Enables/disables depth testing dynamically (vkCmdSetDepthTestEnable).
7206  template <typename Dispatch>
7208  {
7209  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7210  d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
7211  }
7212 
// Enables/disables depth writes dynamically (vkCmdSetDepthWriteEnable).
7213  template <typename Dispatch>
7215  {
7216  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7217  d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
7218  }
7219 
// Sets the depth comparison operator dynamically (vkCmdSetDepthCompareOp).
7220  template <typename Dispatch>
7222  {
7223  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7224  d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
7225  }
7226 
// Enables/disables the depth-bounds test dynamically (vkCmdSetDepthBoundsTestEnable).
7227  template <typename Dispatch>
7229  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7230  {
7231  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7232  d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
7233  }
7234 
// Enables/disables stencil testing dynamically (vkCmdSetStencilTestEnable).
7235  template <typename Dispatch>
7237  {
7238  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7239  d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
7240  }
7241 
// Sets the stencil fail/pass/depth-fail ops and compare op for the given
// face mask (vkCmdSetStencilOp).
7242  template <typename Dispatch>
7246  VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
7248  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7249  {
7250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7251  d.vkCmdSetStencilOp( m_commandBuffer,
7252  static_cast<VkStencilFaceFlags>( faceMask ),
7253  static_cast<VkStencilOp>( failOp ),
7254  static_cast<VkStencilOp>( passOp ),
7255  static_cast<VkStencilOp>( depthFailOp ),
7256  static_cast<VkCompareOp>( compareOp ) );
7257  }
7258 
// Enables/disables rasterizer discard dynamically (vkCmdSetRasterizerDiscardEnable).
7259  template <typename Dispatch>
7261  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7262  {
7263  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7264  d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
7265  }
7266 
// Enables/disables depth bias dynamically (vkCmdSetDepthBiasEnable).
7267  template <typename Dispatch>
7269  {
7270  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7271  d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
7272  }
7273 
// Enables/disables primitive restart dynamically (vkCmdSetPrimitiveRestartEnable).
7274  template <typename Dispatch>
7276  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7277  {
7278  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7279  d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
7280  }
7281 
// Device::getBufferMemoryRequirements — queries memory requirements for a
// prospective buffer (Vulkan 1.3 vkGetDeviceBufferMemoryRequirements) without
// creating the buffer. Three overloads: raw pointer, enhanced (returns by
// value), and StructureChain (caller extends MemoryRequirements2 via pNext).
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7282 -> 7284, 7293 -> 7296, 7306 -> 7309); restore from the generated header.

// Raw-pointer overload: writes into *pMemoryRequirements.
7282  template <typename Dispatch>
7284  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
7285  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7286  {
7287  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7288  d.vkGetDeviceBufferMemoryRequirements(
7289  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
7290  }
7291 
7292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns MemoryRequirements2 by value.
7293  template <typename Dispatch>
7296  {
7297  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7298 
7299  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
7300  d.vkGetDeviceBufferMemoryRequirements(
7301  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7302 
7303  return memoryRequirements;
7304  }
7305 
// StructureChain overload: fills the MemoryRequirements2 element of the
// caller-specified chain so extension structs on pNext get populated too.
7306  template <typename X, typename Y, typename... Z, typename Dispatch>
7309  {
7310  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7311 
7312  StructureChain<X, Y, Z...> structureChain;
7313  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
7314  d.vkGetDeviceBufferMemoryRequirements(
7315  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7316 
7317  return structureChain;
7318  }
7319 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7320 
// Device::getImageMemoryRequirements — queries memory requirements for a
// prospective image (Vulkan 1.3 vkGetDeviceImageMemoryRequirements) without
// creating the image. Same three-overload pattern as the buffer variant:
// raw pointer, enhanced by-value, and StructureChain.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7321 -> 7323, 7332 -> 7335, 7345 -> 7348); restore from the generated header.

// Raw-pointer overload: writes into *pMemoryRequirements.
7321  template <typename Dispatch>
7323  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
7324  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7325  {
7326  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7327  d.vkGetDeviceImageMemoryRequirements(
7328  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
7329  }
7330 
7331 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns MemoryRequirements2 by value.
7332  template <typename Dispatch>
7335  {
7336  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7337 
7338  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
7339  d.vkGetDeviceImageMemoryRequirements(
7340  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7341 
7342  return memoryRequirements;
7343  }
7344 
// StructureChain overload: populates extension structs chained on pNext.
7345  template <typename X, typename Y, typename... Z, typename Dispatch>
7348  {
7349  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7350 
7351  StructureChain<X, Y, Z...> structureChain;
7352  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
7353  d.vkGetDeviceImageMemoryRequirements(
7354  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7355 
7356  return structureChain;
7357  }
7358 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7359 
// Device::getImageSparseMemoryRequirements — queries sparse memory
// requirements for a prospective image (Vulkan 1.3
// vkGetDeviceImageSparseMemoryRequirements). Raw-pointer overload plus two
// enhanced overloads (default allocator, caller-supplied allocator) that use
// the standard two-call enumeration pattern: first call obtains the count,
// second call fills the vector.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7360 -> 7362, 7375 -> 7377, 7400 -> 7402); restore from the generated header.

// Raw-pointer overload: count query when pSparseMemoryRequirements is null.
7360  template <typename Dispatch>
7362  uint32_t * pSparseMemoryRequirementCount,
7363  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
7364  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7365  {
7366  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7367  d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7368  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
7369  pSparseMemoryRequirementCount,
7370  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
7371  }
7372 
7373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns a vector sized by a count query + fill call.
// No retry loop is needed: this entry point returns void (count cannot race).
7374  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
7375  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
7377  {
7378  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7379 
7380  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
7381  uint32_t sparseMemoryRequirementCount;
7382  d.vkGetDeviceImageSparseMemoryRequirements(
7383  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
7384  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7385  d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7386  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
7387  &sparseMemoryRequirementCount,
7388  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
7389 
// Shrink in the (unexpected) case the second call reported fewer entries.
7390  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
7391  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
7392  {
7393  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7394  }
7395  return sparseMemoryRequirements;
7396  }
7397 
// Same as above but constructs the result vector with a caller-supplied
// allocator instance.
7398  template <typename SparseImageMemoryRequirements2Allocator,
7399  typename Dispatch,
7400  typename B1,
7402  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
7404  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
7405  Dispatch const & d ) const
7406  {
7407  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7408 
7409  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
7410  sparseImageMemoryRequirements2Allocator );
7411  uint32_t sparseMemoryRequirementCount;
7412  d.vkGetDeviceImageSparseMemoryRequirements(
7413  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
7414  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7415  d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7416  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
7417  &sparseMemoryRequirementCount,
7418  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
7419 
7420  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
7421  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
7422  {
7423  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7424  }
7425  return sparseMemoryRequirements;
7426  }
7427 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7428 
7429  //=== VK_KHR_surface ===
7430 
// Instance::destroySurfaceKHR / destroy — four overloads (raw-pointer and
// Optional-allocator forms for each name) that all forward to
// vkDestroySurfaceKHR. Destroying a VK_NULL_HANDLE surface is a no-op at the
// Vulkan level, so no null check is done here.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7431 -> 7433, 7441 -> 7444, 7454 -> 7456, 7464 -> 7467); restore from the
// generated header.

// destroySurfaceKHR, raw allocation-callbacks pointer.
7431  template <typename Dispatch>
7433  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7434  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7435  {
7436  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7437  d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7438  }
7439 
7440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroySurfaceKHR, enhanced Optional<AllocationCallbacks> form.
7441  template <typename Dispatch>
7444  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7445  {
7446  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7447 
7448  d.vkDestroySurfaceKHR( m_instance,
7449  static_cast<VkSurfaceKHR>( surface ),
7450  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7451  }
7452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7453 
// destroy(SurfaceKHR) alias, raw allocation-callbacks pointer.
7454  template <typename Dispatch>
7456  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7457  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7458  {
7459  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7460  d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7461  }
7462 
7463 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroy(SurfaceKHR) alias, enhanced Optional<AllocationCallbacks> form.
7464  template <typename Dispatch>
7467  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7468  {
7469  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7470 
7471  d.vkDestroySurfaceKHR( m_instance,
7472  static_cast<VkSurfaceKHR>( surface ),
7473  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7474  }
7475 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7476 
// PhysicalDevice::getSurfaceSupportKHR — asks whether the given queue family
// can present to the given surface (vkGetPhysicalDeviceSurfaceSupportKHR).
// Raw-pointer overload writes into *pSupported; the enhanced overload returns
// the Bool32 via createResultValueType after checking the VkResult.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7477 -> 7480, 7489 -> 7491); restore from the generated header.
7477  template <typename Dispatch>
7480  VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
7481  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7482  {
7483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7484  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
7485  m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
7486  }
7487 
7488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: throws (or returns a ResultValue in no-exception
// builds) when the call does not succeed.
7489  template <typename Dispatch>
7491  PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
7492  {
7493  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7494 
7495  VULKAN_HPP_NAMESPACE::Bool32 supported;
7496  VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR(
7497  m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
7498  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
7499 
7500  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
7501  }
7502 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7503 
// PhysicalDevice::getSurfaceCapabilitiesKHR — queries surface capabilities
// (min/max image counts, extents, transforms) via
// vkGetPhysicalDeviceSurfaceCapabilitiesKHR. Raw-pointer overload plus an
// enhanced overload that returns the struct after result checking.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7504 -> 7506, 7515 -> 7518, and the local declaration at 7521); restore
// from the generated header.
7504  template <typename Dispatch>
7506  VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
7507  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7508  {
7509  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7510  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
7511  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
7512  }
7513 
7514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns SurfaceCapabilitiesKHR by value.
7515  template <typename Dispatch>
7518  {
7519  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7520 
7522  VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
7523  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
7524  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
7525 
7526  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
7527  }
7528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7529 
// PhysicalDevice::getSurfaceFormatsKHR — enumerates supported surface
// formats (vkGetPhysicalDeviceSurfaceFormatsKHR). Raw-pointer overload plus
// two enhanced overloads (default / caller-supplied allocator) that use the
// two-call pattern, looping while VK_INCOMPLETE is returned because the
// format count can change between the count query and the fill call.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7530 -> 7532, 7542 -> 7545, 7572 -> 7576); restore from the generated header.
7530  template <typename Dispatch>
7532  uint32_t * pSurfaceFormatCount,
7533  VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
7534  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7535  {
7536  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7537  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7538  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
7539  }
7540 
7541 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload, default allocator.
7542  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
7545  {
7546  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7547 
7548  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
7549  uint32_t surfaceFormatCount;
7550  VkResult result;
7551  do
7552  {
7553  result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
7554  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
7555  {
7556  surfaceFormats.resize( surfaceFormatCount );
7557  result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7558  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
7559  }
7560  } while ( result == VK_INCOMPLETE );
7561  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
// Shrink if the device reported fewer formats on the fill call.
7562  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
7563  if ( surfaceFormatCount < surfaceFormats.size() )
7564  {
7565  surfaceFormats.resize( surfaceFormatCount );
7566  }
7567  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
7568  }
7569 
// Enhanced overload, caller-supplied allocator instance.
7570  template <typename SurfaceFormatKHRAllocator,
7571  typename Dispatch,
7572  typename B1,
7576  SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
7577  Dispatch const & d ) const
7578  {
7579  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7580 
7581  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
7582  uint32_t surfaceFormatCount;
7583  VkResult result;
7584  do
7585  {
7586  result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
7587  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
7588  {
7589  surfaceFormats.resize( surfaceFormatCount );
7590  result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7591  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
7592  }
7593  } while ( result == VK_INCOMPLETE );
7594  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
7595  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
7596  if ( surfaceFormatCount < surfaceFormats.size() )
7597  {
7598  surfaceFormats.resize( surfaceFormatCount );
7599  }
7600  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
7601  }
7602 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7603 
// PhysicalDevice::getSurfacePresentModesKHR — enumerates supported present
// modes (vkGetPhysicalDeviceSurfacePresentModesKHR). Same structure as
// getSurfaceFormatsKHR: raw-pointer overload plus two enhanced overloads
// with the VK_INCOMPLETE retry loop.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7604 -> 7606, 7616 -> 7619, 7646 -> 7650); restore from the generated header.
7604  template <typename Dispatch>
7606  uint32_t * pPresentModeCount,
7607  VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
7608  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7609  {
7610  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7611  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7612  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
7613  }
7614 
7615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload, default allocator.
7616  template <typename PresentModeKHRAllocator, typename Dispatch>
7619  {
7620  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7621 
7622  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
7623  uint32_t presentModeCount;
7624  VkResult result;
7625  do
7626  {
7627  result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
7628  if ( ( result == VK_SUCCESS ) && presentModeCount )
7629  {
7630  presentModes.resize( presentModeCount );
7631  result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7632  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
7633  }
7634  } while ( result == VK_INCOMPLETE );
7635  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
7636  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
7637  if ( presentModeCount < presentModes.size() )
7638  {
7639  presentModes.resize( presentModeCount );
7640  }
7641  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
7642  }
7643 
// Enhanced overload, caller-supplied allocator instance.
7644  template <typename PresentModeKHRAllocator,
7645  typename Dispatch,
7646  typename B1,
7650  PresentModeKHRAllocator & presentModeKHRAllocator,
7651  Dispatch const & d ) const
7652  {
7653  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7654 
7655  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
7656  uint32_t presentModeCount;
7657  VkResult result;
7658  do
7659  {
7660  result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
7661  if ( ( result == VK_SUCCESS ) && presentModeCount )
7662  {
7663  presentModes.resize( presentModeCount );
7664  result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7665  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
7666  }
7667  } while ( result == VK_INCOMPLETE );
7668  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
7669  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
7670  if ( presentModeCount < presentModes.size() )
7671  {
7672  presentModes.resize( presentModeCount );
7673  }
7674  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
7675  }
7676 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7677 
7678  //=== VK_KHR_swapchain ===
7679 
// Device::createSwapchainKHR — creates a swapchain via vkCreateSwapchainKHR.
// Raw-pointer overload, enhanced overload returning SwapchainKHR, and a
// Unique overload returning a UniqueHandle that destroys the swapchain via
// the supplied allocator/dispatcher.
// NOTE(review): Doxygen extraction dropped signature lines and some local
// declarations (numbering skips 7680 -> 7682, 7694 -> 7698, 7702, 7714 -> 7718,
// 7722, 7732); restore from the generated header.
7680  template <typename Dispatch>
7682  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7684  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7685  {
7686  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7687  return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
7688  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
7689  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
7690  reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
7691  }
7692 
7693 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SwapchainKHR after result checking.
7694  template <typename Dispatch>
7698  Dispatch const & d ) const
7699  {
7700  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7701 
7703  VkResult result =
7704  d.vkCreateSwapchainKHR( m_device,
7705  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
7706  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7707  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
7708  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
7709 
7710  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
7711  }
7712 
7713 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique overload: wraps the handle in a UniqueHandle for RAII destruction.
7714  template <typename Dispatch>
7718  Dispatch const & d ) const
7719  {
7720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7721 
7723  VkResult result =
7724  d.vkCreateSwapchainKHR( m_device,
7725  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
7726  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7727  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
7728  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
7729 
7730  return createResultValueType(
7731  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
7733  }
7734 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
7735 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7736 
// Device::destroySwapchainKHR / destroy — four overloads (raw-pointer and
// Optional-allocator forms for each name) forwarding to vkDestroySwapchainKHR,
// mirroring the destroySurfaceKHR pattern above.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7737 -> 7739, 7747 -> 7750, 7760 -> 7762, 7770 -> 7773); restore from the
// generated header.

// destroySwapchainKHR, raw allocation-callbacks pointer.
7737  template <typename Dispatch>
7739  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7740  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7741  {
7742  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7743  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7744  }
7745 
7746 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroySwapchainKHR, enhanced Optional<AllocationCallbacks> form.
7747  template <typename Dispatch>
7750  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7751  {
7752  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7753 
7754  d.vkDestroySwapchainKHR( m_device,
7755  static_cast<VkSwapchainKHR>( swapchain ),
7756  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7757  }
7758 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7759 
// destroy(SwapchainKHR) alias, raw allocation-callbacks pointer.
7760  template <typename Dispatch>
7762  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
7763  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7764  {
7765  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7766  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7767  }
7768 
7769 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroy(SwapchainKHR) alias, enhanced Optional<AllocationCallbacks> form.
7770  template <typename Dispatch>
7773  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7774  {
7775  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7776 
7777  d.vkDestroySwapchainKHR( m_device,
7778  static_cast<VkSwapchainKHR>( swapchain ),
7779  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7780  }
7781 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7782 
// Device::getSwapchainImagesKHR — enumerates the presentable images of a
// swapchain (vkGetSwapchainImagesKHR). Raw-pointer overload plus two
// enhanced overloads (default / caller-supplied allocator) using the
// two-call pattern with a VK_INCOMPLETE retry loop.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7783 -> 7785, 7795 -> 7798, 7822 -> 7825); restore from the generated header.
7783  template <typename Dispatch>
7785  uint32_t * pSwapchainImageCount,
7786  VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
7787  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7788  {
7789  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7790  return static_cast<Result>(
7791  d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
7792  }
7793 
7794 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload, default allocator.
7795  template <typename ImageAllocator, typename Dispatch>
7798  {
7799  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7800 
7801  std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
7802  uint32_t swapchainImageCount;
7803  VkResult result;
7804  do
7805  {
7806  result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
7807  if ( ( result == VK_SUCCESS ) && swapchainImageCount )
7808  {
7809  swapchainImages.resize( swapchainImageCount );
7810  result = d.vkGetSwapchainImagesKHR(
7811  m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
7812  }
7813  } while ( result == VK_INCOMPLETE );
7814  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
7815  VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
7816  if ( swapchainImageCount < swapchainImages.size() )
7817  {
7818  swapchainImages.resize( swapchainImageCount );
7819  }
7820  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
7821  }
7822 
// Enhanced overload, caller-supplied allocator instance.
7825  Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
7826  {
7827  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7828 
7829  std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
7830  uint32_t swapchainImageCount;
7831  VkResult result;
7832  do
7833  {
7834  result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
7835  if ( ( result == VK_SUCCESS ) && swapchainImageCount )
7836  {
7837  swapchainImages.resize( swapchainImageCount );
7838  result = d.vkGetSwapchainImagesKHR(
7839  m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
7840  }
7841  } while ( result == VK_INCOMPLETE );
7842  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
7843  VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
7844  if ( swapchainImageCount < swapchainImages.size() )
7845  {
7846  swapchainImages.resize( swapchainImageCount );
7847  }
7848  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
7849  }
7850 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7851 
// Device::acquireNextImageKHR — acquires the next presentable image index
// from a swapchain (vkAcquireNextImageKHR), signaling the given semaphore
// and/or fence. The enhanced overload returns ResultValue<uint32_t> rather
// than throwing on every non-success code, because this call has multiple
// expected non-error results (the resultCheck success-code list at original
// lines 7880-7883 was dropped by the extractor).
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7852 -> 7854, 7866 -> 7868); restore from the generated header.
7852  template <typename Dispatch>
7854  uint64_t timeout,
7857  uint32_t * pImageIndex,
7858  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7859  {
7860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7861  return static_cast<Result>( d.vkAcquireNextImageKHR(
7862  m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
7863  }
7864 
7865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the acquired image index together with the
// Result so callers can handle e.g. suboptimal/timeout outcomes.
7866  template <typename Dispatch>
7868  uint64_t timeout,
7871  Dispatch const & d ) const
7872  {
7873  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7874 
7875  uint32_t imageIndex;
7876  VkResult result = d.vkAcquireNextImageKHR(
7877  m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
7878  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
7879  VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
7884 
7885  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
7886  }
7887 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7888 
// Queue::presentKHR — queues an image for presentation (vkQueuePresentKHR).
// The enhanced overload returns the Result directly (not void) so callers
// can react to non-error outcomes; its resultCheck success-code argument
// (original line 7907) was dropped by the extractor.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7889 -> 7891, 7898 -> 7900); restore from the generated header.
7889  template <typename Dispatch>
7891  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7892  {
7893  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7894  return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
7895  }
7896 
7897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7898  template <typename Dispatch>
7900  Dispatch const & d ) const
7901  {
7902  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7903 
7904  VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
7905  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
7906  VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
7908 
7909  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
7910  }
7911 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7912 
// Device::getGroupPresentCapabilitiesKHR — queries device-group present
// capabilities (vkGetDeviceGroupPresentCapabilitiesKHR). Raw-pointer
// overload plus an enhanced overload returning the struct by value after
// result checking.
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7913 -> 7915, 7923 -> 7925); restore from the generated header.
7913  template <typename Dispatch>
7915  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7916  {
7917  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7918  return static_cast<Result>(
7919  d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
7920  }
7921 
7922 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7923  template <typename Dispatch>
7925  Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
7926  {
7927  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7928 
7929  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
7930  VkResult result =
7931  d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
7932  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
7933 
7934  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
7935  }
7936 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7937 
// Device::getGroupSurfacePresentModesKHR — queries the device-group present
// modes supported for a surface (vkGetDeviceGroupSurfacePresentModesKHR).
// Raw-pointer overload plus an enhanced overload returning the flags by
// value (its local declaration at original line 7955 was dropped by the
// extractor).
// NOTE(review): Doxygen extraction dropped signature lines (numbering skips
// 7938 -> 7940, 7949 -> 7952); restore from the generated header.
7938  template <typename Dispatch>
7941  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7942  {
7943  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7944  return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
7945  m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
7946  }
7947 
7948 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7949  template <typename Dispatch>
7952  {
7953  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7954 
7956  VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR(
7957  m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
7958  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
7959 
7960  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
7961  }
7962 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7963 
// Raw-pointer wrapper around vkGetPhysicalDevicePresentRectanglesKHR: caller-provided
// pRectCount/pRects implement the standard Vulkan two-call enumeration contract.
// NOTE(review): rendered source lines 7965 and 7967 (return type/first signature line and the
// pRects parameter line) were dropped by the HTML extraction.
7964  template <typename Dispatch>
7966  uint32_t * pRectCount,
7968  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7969  {
7970  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7971  return static_cast<Result>(
7972  d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
7973  }
7974 
7975 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode enumeration: returns all present rectangles for (physicalDevice, surface) in a
// std::vector<Rect2D>, using the count-then-fetch loop that retries while the driver reports
// VK_INCOMPLETE, then shrinks the vector if the second call returned fewer elements.
// NOTE(review): rendered source lines 7977-7978 (return type + signature) were dropped by the
// HTML extraction.
7976  template <typename Rect2DAllocator, typename Dispatch>
7979  {
7980  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7981 
7982  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
7983  uint32_t rectCount;
7984  VkResult result;
7985  do
7986  {
7987  result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
7988  if ( ( result == VK_SUCCESS ) && rectCount )
7989  {
7990  rects.resize( rectCount );
7991  result = d.vkGetPhysicalDevicePresentRectanglesKHR(
7992  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
7993  }
7994  } while ( result == VK_INCOMPLETE );
7995  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
7996  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
7997  if ( rectCount < rects.size() )
7998  {
7999  rects.resize( rectCount );
8000  }
8001  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
8002  }
8003 
// Allocator overload of the enhanced enumeration above: identical VK_INCOMPLETE retry loop, but
// the result vector is constructed from the caller-supplied Rect2DAllocator.
// NOTE(review): rendered source lines 8004-8005 (template header + return type) were dropped by
// the HTML extraction.
8006  PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
8007  {
8008  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8009 
8010  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
8011  uint32_t rectCount;
8012  VkResult result;
8013  do
8014  {
8015  result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
8016  if ( ( result == VK_SUCCESS ) && rectCount )
8017  {
8018  rects.resize( rectCount );
8019  result = d.vkGetPhysicalDevicePresentRectanglesKHR(
8020  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
8021  }
8022  } while ( result == VK_INCOMPLETE );
8023  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
8024  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
8025  if ( rectCount < rects.size() )
8026  {
8027  rects.resize( rectCount );
8028  }
8029  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
8030  }
8031 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8032 
// Raw-pointer wrapper around vkAcquireNextImage2KHR (device-group variant of acquire); writes the
// acquired swapchain image index through pImageIndex.
// NOTE(review): rendered source line 8034 (return type + pAcquireInfo parameter line) was dropped
// by the HTML extraction.
8033  template <typename Dispatch>
8035  uint32_t * pImageIndex,
8036  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8037  {
8038  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8039  return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
8040  }
8041 
8042 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns ResultValue<uint32_t> (result + image index) rather than using
// an out-parameter. Note it returns ResultValue, not ResultValueType, because multiple success
// codes are possible for this command.
// NOTE(review): rendered source line 8044 (return type/signature) and lines 8053-8056 (the list of
// additional allowed success codes passed to resultCheck — presumably eTimeout/eNotReady/
// eSuboptimalKHR, TODO confirm against the generated header) were dropped by the HTML extraction.
8043  template <typename Dispatch>
8045  Dispatch const & d ) const
8046  {
8047  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8048 
8049  uint32_t imageIndex;
8050  VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
8051  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8052  VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
8057 
8058  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
8059  }
8060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8061 
8062  //=== VK_KHR_display ===
8063 
// Raw-pointer wrapper around vkGetPhysicalDeviceDisplayPropertiesKHR (two-call enumeration of
// the physical device's displays via pPropertyCount/pProperties).
// NOTE(review): rendered source lines 8065-8066 (return type + leading parameter lines) were
// dropped by the HTML extraction.
8064  template <typename Dispatch>
8067  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8068  {
8069  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8070  return static_cast<Result>(
8071  d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
8072  }
8073 
8074 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode enumeration: fetches all DisplayPropertiesKHR for this physical device into a
// vector using the count/fetch loop retried on VK_INCOMPLETE, shrinking on a short final count.
// NOTE(review): rendered source line 8076 (return type) was dropped by the HTML extraction.
8075  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
8077  PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
8078  {
8079  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8080 
8081  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
8082  uint32_t propertyCount;
8083  VkResult result;
8084  do
8085  {
8086  result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
8087  if ( ( result == VK_SUCCESS ) && propertyCount )
8088  {
8089  properties.resize( propertyCount );
8090  result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
8091  }
8092  } while ( result == VK_INCOMPLETE );
8093  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
8094  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8095  if ( propertyCount < properties.size() )
8096  {
8097  properties.resize( propertyCount );
8098  }
8099  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8100  }
8101 
// Allocator overload of the display-properties enumeration: same VK_INCOMPLETE retry loop, vector
// constructed from the caller's DisplayPropertiesKHRAllocator.
// NOTE(review): rendered source lines 8105-8106 (the SFINAE/enable_if constraint on B1 and the
// return type) were dropped by the HTML extraction.
8102  template <typename DisplayPropertiesKHRAllocator,
8103  typename Dispatch,
8104  typename B1,
8107  PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
8108  {
8109  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8110 
8111  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
8112  uint32_t propertyCount;
8113  VkResult result;
8114  do
8115  {
8116  result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
8117  if ( ( result == VK_SUCCESS ) && propertyCount )
8118  {
8119  properties.resize( propertyCount );
8120  result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
8121  }
8122  } while ( result == VK_INCOMPLETE );
8123  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
8124  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8125  if ( propertyCount < properties.size() )
8126  {
8127  properties.resize( propertyCount );
8128  }
8129  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8130  }
8131 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8132 
// Raw-pointer wrapper around vkGetPhysicalDeviceDisplayPlanePropertiesKHR (two-call enumeration
// of display planes).
// NOTE(review): rendered source lines 8134-8135 (return type + leading parameter lines) were
// dropped by the HTML extraction.
8133  template <typename Dispatch>
8136  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8137  {
8138  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8139  return static_cast<Result>(
8140  d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
8141  }
8142 
8143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode enumeration of display-plane properties into a vector; standard count/fetch loop
// retried on VK_INCOMPLETE with trailing shrink.
// NOTE(review): rendered source lines 8145-8147 (return type + signature) were dropped by the
// HTML extraction.
8144  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
8148  {
8149  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8150 
8151  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
8152  uint32_t propertyCount;
8153  VkResult result;
8154  do
8155  {
8156  result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
8157  if ( ( result == VK_SUCCESS ) && propertyCount )
8158  {
8159  properties.resize( propertyCount );
8160  result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
8161  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
8162  }
8163  } while ( result == VK_INCOMPLETE );
8164  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
8165  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8166  if ( propertyCount < properties.size() )
8167  {
8168  properties.resize( propertyCount );
8169  }
8170  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8171  }
8172 
// Allocator overload of the display-plane-properties enumeration; vector constructed from the
// caller's allocator, same VK_INCOMPLETE retry loop.
// NOTE(review): rendered source lines 8176-8178 (enable_if constraint + return type) were dropped
// by the HTML extraction.
8173  template <typename DisplayPlanePropertiesKHRAllocator,
8174  typename Dispatch,
8175  typename B1,
8179  PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
8180  {
8181  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8182 
8183  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
8184  uint32_t propertyCount;
8185  VkResult result;
8186  do
8187  {
8188  result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
8189  if ( ( result == VK_SUCCESS ) && propertyCount )
8190  {
8191  properties.resize( propertyCount );
8192  result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
8193  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
8194  }
8195  } while ( result == VK_INCOMPLETE );
8196  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
8197  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8198  if ( propertyCount < properties.size() )
8199  {
8200  properties.resize( propertyCount );
8201  }
8202  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8203  }
8204 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8205 
// Raw-pointer wrapper around vkGetDisplayPlaneSupportedDisplaysKHR: enumerates the displays a
// given plane (planeIndex) can be placed on.
// NOTE(review): rendered source lines 8207 and 8209 (return type/first signature line and the
// pDisplays parameter line) were dropped by the HTML extraction.
8206  template <typename Dispatch>
8208  uint32_t * pDisplayCount,
8210  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8211  {
8212  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8213  return static_cast<Result>(
8214  d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
8215  }
8216 
8217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode enumeration: returns all DisplayKHR handles supported by the given plane in a
// vector, with the usual VK_INCOMPLETE retry loop and trailing shrink.
// NOTE(review): rendered source line 8219 (return type) was dropped by the HTML extraction.
8218  template <typename DisplayKHRAllocator, typename Dispatch>
8220  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
8221  {
8222  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8223 
8224  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
8225  uint32_t displayCount;
8226  VkResult result;
8227  do
8228  {
8229  result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
8230  if ( ( result == VK_SUCCESS ) && displayCount )
8231  {
8232  displays.resize( displayCount );
8233  result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
8234  }
8235  } while ( result == VK_INCOMPLETE );
8236  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
8237  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
8238  if ( displayCount < displays.size() )
8239  {
8240  displays.resize( displayCount );
8241  }
8242  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
8243  }
8244 
// Allocator overload of the supported-displays enumeration; vector constructed from the caller's
// DisplayKHRAllocator, same VK_INCOMPLETE retry loop.
// NOTE(review): rendered source lines 8248-8249 (enable_if constraint + return type) were dropped
// by the HTML extraction.
8245  template <typename DisplayKHRAllocator,
8246  typename Dispatch,
8247  typename B1,
8250  PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
8251  {
8252  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8253 
8254  std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
8255  uint32_t displayCount;
8256  VkResult result;
8257  do
8258  {
8259  result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
8260  if ( ( result == VK_SUCCESS ) && displayCount )
8261  {
8262  displays.resize( displayCount );
8263  result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
8264  }
8265  } while ( result == VK_INCOMPLETE );
8266  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
8267  VULKAN_HPP_ASSERT( displayCount <= displays.size() );
8268  if ( displayCount < displays.size() )
8269  {
8270  displays.resize( displayCount );
8271  }
8272  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
8273  }
8274 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8275 
// Raw-pointer wrapper around vkGetDisplayModePropertiesKHR: enumerates the built-in modes of a
// display via pPropertyCount/pProperties.
// NOTE(review): rendered source lines 8277 and 8279 (return type/display parameter line and the
// pProperties parameter line) were dropped by the HTML extraction.
8276  template <typename Dispatch>
8278  uint32_t * pPropertyCount,
8280  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8281  {
8282  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8283  return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
8284  m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
8285  }
8286 
8287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode enumeration of a display's mode properties into a vector; standard count/fetch
// loop retried on VK_INCOMPLETE with trailing shrink.
// NOTE(review): rendered source lines 8289-8291 (return type + signature, including the `display`
// parameter) were dropped by the HTML extraction.
8288  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
8292  {
8293  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8294 
8295  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
8296  uint32_t propertyCount;
8297  VkResult result;
8298  do
8299  {
8300  result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
8301  if ( ( result == VK_SUCCESS ) && propertyCount )
8302  {
8303  properties.resize( propertyCount );
8304  result = d.vkGetDisplayModePropertiesKHR(
8305  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
8306  }
8307  } while ( result == VK_INCOMPLETE );
8308  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
8309  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8310  if ( propertyCount < properties.size() )
8311  {
8312  properties.resize( propertyCount );
8313  }
8314  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8315  }
8316 
// Allocator overload of the display-mode-properties enumeration; vector constructed from the
// caller's allocator, same VK_INCOMPLETE retry loop.
// NOTE(review): rendered source lines 8320-8323 (enable_if constraint, return type and the first
// signature line carrying the `display` parameter) were dropped by the HTML extraction.
8317  template <typename DisplayModePropertiesKHRAllocator,
8318  typename Dispatch,
8319  typename B1,
8324  DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
8325  Dispatch const & d ) const
8326  {
8327  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8328 
8329  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
8330  uint32_t propertyCount;
8331  VkResult result;
8332  do
8333  {
8334  result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
8335  if ( ( result == VK_SUCCESS ) && propertyCount )
8336  {
8337  properties.resize( propertyCount );
8338  result = d.vkGetDisplayModePropertiesKHR(
8339  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
8340  }
8341  } while ( result == VK_INCOMPLETE );
8342  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
8343  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
8344  if ( propertyCount < properties.size() )
8345  {
8346  properties.resize( propertyCount );
8347  }
8348  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
8349  }
8350 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8351 
// Raw-pointer wrapper around vkCreateDisplayModeKHR: creates a custom display mode for `display`
// from pCreateInfo, writing the new handle through pMode.
// NOTE(review): rendered source lines 8353-8354 and 8356 (return type and the display/createInfo/
// pMode parameter lines) were dropped by the HTML extraction.
8352  template <typename Dispatch>
8355  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8357  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8358  {
8359  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8360  return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
8361  static_cast<VkDisplayKHR>( display ),
8362  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
8363  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8364  reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
8365  }
8366 
8367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates a display mode and returns the DisplayModeKHR handle by value
// (via createResultValueType), with optional AllocationCallbacks.
// NOTE(review): rendered source lines 8369-8372 (return type + signature) and 8377 (the local
// `mode` handle declaration written by the call below) were dropped by the HTML extraction.
8368  template <typename Dispatch>
8373  Dispatch const & d ) const
8374  {
8375  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8376 
8378  VkResult result =
8379  d.vkCreateDisplayModeKHR( m_physicalDevice,
8380  static_cast<VkDisplayKHR>( display ),
8381  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
8382  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8383  reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
8384  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
8385 
8386  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
8387  }
8388 
8389 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: same creation path as createDisplayModeKHR, but wraps the resulting
// handle for RAII-style ownership before returning (see the createResultValueType call).
// NOTE(review): rendered source lines 8391-8394 (return type + signature), 8399 (local `mode`
// declaration) and 8410 (the UniqueHandle construction argument line) were dropped by the HTML
// extraction.
8390  template <typename Dispatch>
8395  Dispatch const & d ) const
8396  {
8397  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8398 
8400  VkResult result =
8401  d.vkCreateDisplayModeKHR( m_physicalDevice,
8402  static_cast<VkDisplayKHR>( display ),
8403  reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
8404  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8405  reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
8406  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
8407 
8408  return createResultValueType(
8409  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8411  }
8412 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8413 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8414 
// Raw-pointer wrapper around vkGetDisplayPlaneCapabilitiesKHR: queries capabilities of (mode,
// planeIndex), writing through pCapabilities.
// NOTE(review): rendered source lines 8416-8417 and 8419 (return type, `mode` parameter line and
// pCapabilities parameter line) were dropped by the HTML extraction.
8415  template <typename Dispatch>
8418  uint32_t planeIndex,
8420  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8421  {
8422  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8423  return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
8424  m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
8425  }
8426 
8427 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the DisplayPlaneCapabilitiesKHR structure by value via
// createResultValueType, after resultCheck on the VkResult.
// NOTE(review): rendered source lines 8429-8430 (return type + signature) and 8434 (the local
// `capabilities` declaration filled by the call below) were dropped by the HTML extraction.
8428  template <typename Dispatch>
8431  {
8432  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8433 
8435  VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR(
8436  m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
8437  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
8438 
8439  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
8440  }
8441 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8442 
// Raw-pointer wrapper around vkCreateDisplayPlaneSurfaceKHR: creates a display-plane surface on
// this instance, writing the new SurfaceKHR through pSurface.
// NOTE(review): rendered source lines 8444 and 8446 (return type/createInfo line and pSurface
// parameter line) were dropped by the HTML extraction.
8443  template <typename Dispatch>
8445  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8447  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8448  {
8449  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8450  return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
8451  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
8452  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8453  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8454  }
8455 
8456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates a display-plane surface and returns the SurfaceKHR handle by
// value, with optional AllocationCallbacks.
// NOTE(review): rendered source lines 8458-8460 (return type + signature) and 8465 (the local
// `surface` handle declaration written by the call below) were dropped by the HTML extraction.
8457  template <typename Dispatch>
8461  Dispatch const & d ) const
8462  {
8463  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8464 
8466  VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
8467  m_instance,
8468  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
8469  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8470  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8471  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
8472 
8473  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8474  }
8475 
8476 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: same creation path as createDisplayPlaneSurfaceKHR, returning the surface
// wrapped for RAII-style ownership (see the createResultValueType call).
// NOTE(review): rendered source lines 8478-8480 (return type + signature), 8485 (local `surface`
// declaration) and 8495 (the UniqueHandle/ObjectDestroy argument line) were dropped by the HTML
// extraction.
8477  template <typename Dispatch>
8481  Dispatch const & d ) const
8482  {
8483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8484 
8486  VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
8487  m_instance,
8488  reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
8489  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8490  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8491  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
8492 
8493  return createResultValueType(
8494  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8496  }
8497 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8498 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8499 
8500  //=== VK_KHR_display_swapchain ===
8501 
// Raw-pointer wrapper around vkCreateSharedSwapchainsKHR: creates swapchainCount shared swapchains
// from the pCreateInfos array, writing the handles into pSwapchains.
// NOTE(review): rendered source line 8503 (return type + first signature line carrying
// swapchainCount) was dropped by the HTML extraction.
8502  template <typename Dispatch>
8504  const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
8505  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8506  VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
8507  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8508  {
8509  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8510  return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
8511  swapchainCount,
8512  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
8513  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8514  reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
8515  }
8516 
8517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates one swapchain per element of `createInfos` (an ArrayProxy —
// sized by createInfos.size()) and returns them in a std::vector<SwapchainKHR>.
// NOTE(review): rendered source lines 8519-8521 (return type + signature, including the
// createInfos/allocator parameters) were dropped by the HTML extraction.
8518  template <typename SwapchainKHRAllocator, typename Dispatch>
8522  Dispatch const & d ) const
8523  {
8524  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8525 
8526  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
8527  VkResult result = d.vkCreateSharedSwapchainsKHR(
8528  m_device,
8529  createInfos.size(),
8530  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
8531  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8532  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8533  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
8534 
8535  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
8536  }
8537 
// Allocator overload of the shared-swapchain batch creation: result vector is constructed with the
// caller-supplied SwapchainKHRAllocator; otherwise identical to the overload above.
// NOTE(review): rendered source lines 8541-8544 (enable_if constraint, return type and the first
// signature line carrying createInfos/allocator) were dropped by the HTML extraction.
8538  template <typename SwapchainKHRAllocator,
8539  typename Dispatch,
8540  typename B0,
8545  SwapchainKHRAllocator & swapchainKHRAllocator,
8546  Dispatch const & d ) const
8547  {
8548  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8549 
8550  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
8551  VkResult result = d.vkCreateSharedSwapchainsKHR(
8552  m_device,
8553  createInfos.size(),
8554  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
8555  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8556  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8557  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
8558 
8559  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
8560  }
8561 
// Single-object convenience: calls vkCreateSharedSwapchainsKHR with count 1 for one createInfo and
// returns the single SwapchainKHR by value.
// NOTE(review): rendered source lines 8563-8565 (return type + signature) and 8570 (the local
// `swapchain` handle declaration written by the call below) were dropped by the HTML extraction.
8562  template <typename Dispatch>
8566  Dispatch const & d ) const
8567  {
8568  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8569 
8571  VkResult result = d.vkCreateSharedSwapchainsKHR(
8572  m_device,
8573  1,
8574  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8575  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8576  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
8577  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
8578 
8579  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
8580  }
8581 
8582 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle batch variant: creates the shared swapchains into a plain vector, then wraps each
// handle in a UniqueHandle sharing one ObjectDestroy deleter (device + allocator + dispatcher).
// NOTE(review): rendered source lines 8584-8587 (return type + signature carrying createInfos and
// allocator) were dropped by the HTML extraction.
8583  template <typename Dispatch, typename SwapchainKHRAllocator>
8588  Dispatch const & d ) const
8589  {
8590  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8591 
8592  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
8593  VkResult result = d.vkCreateSharedSwapchainsKHR(
8594  m_device,
8595  createInfos.size(),
8596  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
8597  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8598  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8599  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
8600  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
8601  uniqueSwapchains.reserve( createInfos.size() );
8602  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
8603  for ( auto const & swapchain : swapchains )
8604  {
8605  uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
8606  }
8607  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
8608  }
8609 
// Allocator overload of the smart-handle batch variant: the UniqueHandle vector is constructed
// from the caller's SwapchainKHRAllocator (constrained via B0 to hold UniqueHandle<SwapchainKHR>).
// NOTE(review): rendered source lines 8614-8617 (return type + signature carrying createInfos and
// allocator) were dropped by the HTML extraction.
8610  template <typename Dispatch,
8611  typename SwapchainKHRAllocator,
8612  typename B0,
8613  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
8618  SwapchainKHRAllocator & swapchainKHRAllocator,
8619  Dispatch const & d ) const
8620  {
8621  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8622 
8623  std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
8624  VkResult result = d.vkCreateSharedSwapchainsKHR(
8625  m_device,
8626  createInfos.size(),
8627  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
8628  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8629  reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8630  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
8631  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
8632  uniqueSwapchains.reserve( createInfos.size() );
8633  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
8634  for ( auto const & swapchain : swapchains )
8635  {
8636  uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
8637  }
8638  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
8639  }
8640 
// Smart-handle single-object convenience: count-1 vkCreateSharedSwapchainsKHR call, returning the
// swapchain wrapped for RAII ownership (see the createResultValueType call).
// NOTE(review): rendered source lines 8642's following 8643-8644 (return type + signature), 8649
// (local `swapchain` declaration) and 8660 (the UniqueHandle/ObjectDestroy argument line) were
// dropped by the HTML extraction.
8642  template <typename Dispatch>
8645  Dispatch const & d ) const
8646  {
8647  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8648 
8650  VkResult result = d.vkCreateSharedSwapchainsKHR(
8651  m_device,
8652  1,
8653  reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8654  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8655  reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
8656  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
8657 
8658  return createResultValueType(
8659  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8661  }
8662 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8663 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8664 
8665 #if defined( VK_USE_PLATFORM_XLIB_KHR )
8666  //=== VK_KHR_xlib_surface ===
8667 
// Raw-pointer wrapper around vkCreateXlibSurfaceKHR (guarded by VK_USE_PLATFORM_XLIB_KHR): creates
// an Xlib-backed surface, writing the handle through pSurface.
// NOTE(review): rendered source line 8671 (the pSurface parameter line) was dropped by the HTML
// extraction.
8668  template <typename Dispatch>
8669  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
8670  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8672  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8673  {
8674  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8675  return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
8676  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
8677  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8678  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8679  }
8680 
8681 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: creates an Xlib surface and returns the SurfaceKHR by value, with
// optional AllocationCallbacks.
// NOTE(review): rendered source lines 8683 (return type) and 8690 (the local `surface` handle
// declaration written by the call below) were dropped by the HTML extraction.
8682  template <typename Dispatch>
8684  Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
8685  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8686  Dispatch const & d ) const
8687  {
8688  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8689 
8691  VkResult result =
8692  d.vkCreateXlibSurfaceKHR( m_instance,
8693  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
8694  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8695  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8696  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
8697 
8698  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8699  }
8700 
8701 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle variant: same creation path as createXlibSurfaceKHR, returning the surface wrapped
// in UniqueHandle<SurfaceKHR> with an ObjectDestroy deleter bound to this instance.
// NOTE(review): rendered source line 8710 (the local `surface` handle declaration written by the
// call below) was dropped by the HTML extraction.
8702  template <typename Dispatch>
8703  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
8704  Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
8705  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8706  Dispatch const & d ) const
8707  {
8708  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8709 
8711  VkResult result =
8712  d.vkCreateXlibSurfaceKHR( m_instance,
8713  reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
8714  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8715  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8716  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
8717 
8718  return createResultValueType(
8719  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8720  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
8721  }
8722 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8723 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8724 
8725  template <typename Dispatch>
8727  PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8728  {
8729  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8730  return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
8731  }
8732 
8733 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8734  template <typename Dispatch>
8736  PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8737  {
8738  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8739 
8740  VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
8741 
8742  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
8743  }
8744 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8745 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
8746 
8747 #if defined( VK_USE_PLATFORM_XCB_KHR )
8748  //=== VK_KHR_xcb_surface ===
8749 
8750  template <typename Dispatch>
8751  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
8752  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8754  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8755  {
8756  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8757  return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
8758  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
8759  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8760  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8761  }
8762 
8763 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8764  template <typename Dispatch>
8766  Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
8767  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8768  Dispatch const & d ) const
8769  {
8770  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8771 
8773  VkResult result =
8774  d.vkCreateXcbSurfaceKHR( m_instance,
8775  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
8776  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8777  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8778  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
8779 
8780  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8781  }
8782 
8783 # ifndef VULKAN_HPP_NO_SMART_HANDLE
8784  template <typename Dispatch>
8785  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
8786  Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
8787  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8788  Dispatch const & d ) const
8789  {
8790  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8791 
8793  VkResult result =
8794  d.vkCreateXcbSurfaceKHR( m_instance,
8795  reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
8796  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8797  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8798  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
8799 
8800  return createResultValueType(
8801  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8802  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
8803  }
8804 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8805 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8806 
8807  template <typename Dispatch>
8808  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
8809  xcb_connection_t * connection,
8810  xcb_visualid_t visual_id,
8811  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8812  {
8813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8814  return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
8815  }
8816 
8817 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8818  template <typename Dispatch>
8819  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
8820  xcb_connection_t & connection,
8821  xcb_visualid_t visual_id,
8822  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8823  {
8824  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8825 
8826  VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
8827 
8828  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
8829  }
8830 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8831 #endif /*VK_USE_PLATFORM_XCB_KHR*/
8832 
8833 #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
8834  //=== VK_KHR_wayland_surface ===
8835 
8836  template <typename Dispatch>
8837  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
8838  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8840  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8841  {
8842  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8843  return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
8844  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
8845  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8846  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8847  }
8848 
8849 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8850  template <typename Dispatch>
8852  Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
8853  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8854  Dispatch const & d ) const
8855  {
8856  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8857 
8859  VkResult result = d.vkCreateWaylandSurfaceKHR(
8860  m_instance,
8861  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
8862  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8863  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8864  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
8865 
8866  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8867  }
8868 
8869 # ifndef VULKAN_HPP_NO_SMART_HANDLE
8870  template <typename Dispatch>
8871  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
8872  Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
8873  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8874  Dispatch const & d ) const
8875  {
8876  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8877 
8879  VkResult result = d.vkCreateWaylandSurfaceKHR(
8880  m_instance,
8881  reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
8882  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8883  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8884  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
8885 
8886  return createResultValueType(
8887  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8888  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
8889  }
8890 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8891 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8892 
8893  template <typename Dispatch>
8894  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
8895  struct wl_display * display,
8896  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8897  {
8898  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8899  return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
8900  }
8901 
8902 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8903  template <typename Dispatch>
8905  PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8906  {
8907  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8908 
8909  VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
8910 
8911  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
8912  }
8913 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8914 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
8915 
8916 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
8917  //=== VK_KHR_android_surface ===
8918 
8919  template <typename Dispatch>
8920  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
8921  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8923  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8924  {
8925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8926  return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
8927  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
8928  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8929  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8930  }
8931 
8932 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8933  template <typename Dispatch>
8935  Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
8936  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8937  Dispatch const & d ) const
8938  {
8939  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8940 
8942  VkResult result = d.vkCreateAndroidSurfaceKHR(
8943  m_instance,
8944  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
8945  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8946  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8947  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
8948 
8949  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8950  }
8951 
8952 # ifndef VULKAN_HPP_NO_SMART_HANDLE
8953  template <typename Dispatch>
8954  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
8955  Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
8956  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8957  Dispatch const & d ) const
8958  {
8959  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8960 
8962  VkResult result = d.vkCreateAndroidSurfaceKHR(
8963  m_instance,
8964  reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
8965  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8966  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8967  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
8968 
8969  return createResultValueType(
8970  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8971  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
8972  }
8973 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
8974 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8975 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
8976 
8977 #if defined( VK_USE_PLATFORM_WIN32_KHR )
8978  //=== VK_KHR_win32_surface ===
8979 
8980  template <typename Dispatch>
8981  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
8982  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
8984  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8985  {
8986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8987  return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
8988  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
8989  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8990  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8991  }
8992 
8993 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8994  template <typename Dispatch>
8996  Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
8997  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8998  Dispatch const & d ) const
8999  {
9000  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9001 
9003  VkResult result =
9004  d.vkCreateWin32SurfaceKHR( m_instance,
9005  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
9006  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9007  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
9008  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
9009 
9010  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
9011  }
9012 
9013 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9014  template <typename Dispatch>
9015  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
9016  Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
9017  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9018  Dispatch const & d ) const
9019  {
9020  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9021 
9023  VkResult result =
9024  d.vkCreateWin32SurfaceKHR( m_instance,
9025  reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
9026  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9027  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
9028  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
9029 
9030  return createResultValueType(
9031  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
9032  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
9033  }
9034 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9035 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9036 
9037  template <typename Dispatch>
9038  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9039  {
9040  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9041  return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
9042  }
9043 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
9044 
9045  //=== VK_EXT_debug_report ===
9046 
9047  template <typename Dispatch>
9050  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9052  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9053  {
9054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9055  return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
9056  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
9057  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9058  reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
9059  }
9060 
9061 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9062  template <typename Dispatch>
9066  Dispatch const & d ) const
9067  {
9068  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9069 
9071  VkResult result = d.vkCreateDebugReportCallbackEXT(
9072  m_instance,
9073  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
9074  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9075  reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
9076  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
9077 
9078  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback );
9079  }
9080 
9081 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9082  template <typename Dispatch>
9086  Dispatch const & d ) const
9087  {
9088  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9089 
9091  VkResult result = d.vkCreateDebugReportCallbackEXT(
9092  m_instance,
9093  reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
9094  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9095  reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
9096  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
9097 
9098  return createResultValueType(
9099  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
9101  }
9102 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
9103 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9104 
9105  template <typename Dispatch>
9107  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9108  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9109  {
9110  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9111  d.vkDestroyDebugReportCallbackEXT(
9112  m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9113  }
9114 
9115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9116  template <typename Dispatch>
9119  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9120  {
9121  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9122 
9123  d.vkDestroyDebugReportCallbackEXT(
9124  m_instance,
9125  static_cast<VkDebugReportCallbackEXT>( callback ),
9126  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
9127  }
9128 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9129 
9130  template <typename Dispatch>
9132  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
9133  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9134  {
9135  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9136  d.vkDestroyDebugReportCallbackEXT(
9137  m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9138  }
9139 
9140 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9141  template <typename Dispatch>
9144  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9145  {
9146  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9147 
9148  d.vkDestroyDebugReportCallbackEXT(
9149  m_instance,
9150  static_cast<VkDebugReportCallbackEXT>( callback ),
9151  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
9152  }
9153 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9154 
9155  template <typename Dispatch>
9158  uint64_t object,
9159  size_t location,
9160  int32_t messageCode,
9161  const char * pLayerPrefix,
9162  const char * pMessage,
9163  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9164  {
9165  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9166  d.vkDebugReportMessageEXT( m_instance,
9167  static_cast<VkDebugReportFlagsEXT>( flags ),
9168  static_cast<VkDebugReportObjectTypeEXT>( objectType ),
9169  object,
9170  location,
9171  messageCode,
9172  pLayerPrefix,
9173  pMessage );
9174  }
9175 
9176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9177  template <typename Dispatch>
9180  uint64_t object,
9181  size_t location,
9182  int32_t messageCode,
9183  const std::string & layerPrefix,
9184  const std::string & message,
9185  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9186  {
9187  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9188 
9189  d.vkDebugReportMessageEXT( m_instance,
9190  static_cast<VkDebugReportFlagsEXT>( flags ),
9191  static_cast<VkDebugReportObjectTypeEXT>( objectType ),
9192  object,
9193  location,
9194  messageCode,
9195  layerPrefix.c_str(),
9196  message.c_str() );
9197  }
9198 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9199 
9200  //=== VK_EXT_debug_marker ===
9201 
9202  template <typename Dispatch>
9204  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9205  {
9206  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9207  return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
9208  }
9209 
9210 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9211  template <typename Dispatch>
9214  {
9215  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9216 
9217  VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
9218  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
9219 
9220  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
9221  }
9222 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9223 
9224  template <typename Dispatch>
9226  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9227  {
9228  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9229  return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
9230  }
9231 
9232 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9233  template <typename Dispatch>
9236  {
9237  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9238 
9239  VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
9240  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
9241 
9242  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
9243  }
9244 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9245 
9246  template <typename Dispatch>
9248  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9249  {
9250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9251  d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
9252  }
9253 
9254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9255  template <typename Dispatch>
9257  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9258  {
9259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9260 
9261  d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
9262  }
9263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9264 
9265  template <typename Dispatch>
9267  {
9268  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9269  d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
9270  }
9271 
9272  template <typename Dispatch>
9274  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9275  {
9276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9277  d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
9278  }
9279 
9280 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9281  template <typename Dispatch>
9283  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9284  {
9285  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9286 
9287  d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
9288  }
9289 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9290 
9291 #if defined( VK_ENABLE_BETA_EXTENSIONS )
9292  //=== VK_KHR_video_queue ===
9293 
9294  template <typename Dispatch>
9295  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
9296  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
9297  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9298  {
9299  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9300  return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9301  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
9302  }
9303 
9304 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9305  template <typename Dispatch>
9307  PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
9308  {
9309  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9310 
9311  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
9312  VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9313  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
9314  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
9315 
9316  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
9317  }
9318 
9319  template <typename X, typename Y, typename... Z, typename Dispatch>
9320  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
9321  PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
9322  {
9323  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9324 
9325  StructureChain<X, Y, Z...> structureChain;
9326  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
9327  VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9328  m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
9329  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
9330 
9331  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
9332  }
9333 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9334 
9335  template <typename Dispatch>
9337  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
9338  uint32_t * pVideoFormatPropertyCount,
9339  VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
9340  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9341  {
9342  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9343  return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
9344  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
9345  pVideoFormatPropertyCount,
9346  reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
9347  }
9348 
9349 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9350  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
9352  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
9353  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
9354  {
9355  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9356 
9357  std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
9358  uint32_t videoFormatPropertyCount;
9359  VkResult result;
9360  do
9361  {
9362  result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
9363  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
9364  if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
9365  {
9366  videoFormatProperties.resize( videoFormatPropertyCount );
9367  result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
9368  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
9369  &videoFormatPropertyCount,
9370  reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
9371  }
9372  } while ( result == VK_INCOMPLETE );
9373  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
9374  VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
9375  if ( videoFormatPropertyCount < videoFormatProperties.size() )
9376  {
9377  videoFormatProperties.resize( videoFormatPropertyCount );
9378  }
9379  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
9380  }
9381 
// NOTE(review): doxygen-rendered capture — original header lines 9385-9386 (the
// B1 / enable_if allocator-convertibility constraint of this template) are
// missing from this listing; the surviving tokens are kept byte-identical.
//
// Overload of PhysicalDevice::getVideoFormatPropertiesKHR that takes a
// caller-supplied allocator for the result vector. Uses the standard Vulkan
// two-call enumeration pattern: query the count with a null pointer, resize,
// then fill — looping while the driver reports VK_INCOMPLETE (count raced).
 9382  template <typename VideoFormatPropertiesKHRAllocator,
 9383  typename Dispatch,
 9384  typename B1,
 9387  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
 9388  PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
 9389  VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
 9390  Dispatch const & d ) const
 9391  {
 9392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9393 
 9394  std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
 9395  uint32_t videoFormatPropertyCount;
 9396  VkResult result;
 9397  do
 9398  {
// First call: pass nullptr to retrieve only the number of available formats.
 9399  result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
 9400  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
 9401  if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
 9402  {
// Second call: fill the (resized) vector with the actual properties.
 9403  videoFormatProperties.resize( videoFormatPropertyCount );
 9404  result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
 9405  reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
 9406  &videoFormatPropertyCount,
 9407  reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
 9408  }
 9409  } while ( result == VK_INCOMPLETE );
 9410  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
// Shrink if the driver ultimately returned fewer entries than it first reported.
 9411  VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
 9412  if ( videoFormatPropertyCount < videoFormatProperties.size() )
 9413  {
 9414  videoFormatProperties.resize( videoFormatPropertyCount );
 9415  }
 9416  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
 9417  }
 9418 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9418 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9419 
// --- VkVideoSessionKHR creation / destruction wrappers (VK_KHR_video_queue) ---
//
// Raw overload: mirrors the C API 1:1 (out-parameter, no exceptions), only
// bridging C++ wrapper types to their Vk* equivalents via reinterpret_cast.
 9420  template <typename Dispatch>
 9421  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
 9422  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9423  VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
 9424  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9425  {
// Guards against a dispatcher initialized for a different Vulkan header version.
 9426  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9427  return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
 9428  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
 9429  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 9430  reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
 9431  }
 9432 
 9433 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes references / Optional allocator, checks the result
// (throws unless exceptions are disabled) and returns the handle by value.
// NOTE(review): capture is missing original line 9435 (the declaration's
// return-type line) — tokens below are byte-identical to the capture.
 9434  template <typename Dispatch>
 9436  Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
 9437  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9438  Dispatch const & d ) const
 9439  {
 9440  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9441 
 9442  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
 9443  VkResult result =
 9444  d.vkCreateVideoSessionKHR( m_device,
 9445  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
 9446  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 9447  reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
 9448  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
 9449 
 9450  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession );
 9451  }
 9452 
 9453 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique overload: same as above but wraps the handle in a UniqueHandle that
// destroys the session (with the same allocator and dispatcher) on scope exit.
 9454  template <typename Dispatch>
 9455  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
 9456  Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
 9457  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9458  Dispatch const & d ) const
 9459  {
 9460  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9461 
 9462  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
 9463  VkResult result =
 9464  d.vkCreateVideoSessionKHR( m_device,
 9465  reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
 9466  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 9467  reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
 9468  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
 9469 
 9470  return createResultValueType(
 9471  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 9472  UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
 9473  }
 9474 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 9475 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9476 
// destroyVideoSessionKHR — raw pointer-allocator overload.
 9477  template <typename Dispatch>
 9478  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9479  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9480  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9481  {
 9482  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9483  d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 9484  }
 9485 
 9486 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroyVideoSessionKHR — enhanced overload with an Optional allocator.
 9487  template <typename Dispatch>
 9488  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9489  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9490  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9491  {
 9492  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9493 
 9494  d.vkDestroyVideoSessionKHR(
 9495  m_device,
 9496  static_cast<VkVideoSessionKHR>( videoSession ),
 9497  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 9498  }
 9499 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9500 
// Device::destroy overloads — generic-name aliases forwarding to
// vkDestroyVideoSessionKHR, generated so UniqueHandle deleters can call them.
 9501  template <typename Dispatch>
 9502  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9503  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9504  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9505  {
 9506  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9507  d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 9508  }
 9509 
 9510 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 9511  template <typename Dispatch>
 9512  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9513  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9514  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9515  {
 9516  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9517 
 9518  d.vkDestroyVideoSessionKHR(
 9519  m_device,
 9520  static_cast<VkVideoSessionKHR>( videoSession ),
 9521  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 9522  }
 9523 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9524 
// --- Video session memory requirements & binding (VK_KHR_video_queue) ---
//
// Raw overload: C-style count/array out-parameters, no exceptions.
// NOTE(review): capture is missing original line 9526 (the declaration's
// return-type line) — tokens below are byte-identical to the capture.
 9525  template <typename Dispatch>
 9527  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9528  uint32_t * pMemoryRequirementsCount,
 9529  VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
 9530  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9531  {
 9532  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9533  return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
 9534  static_cast<VkVideoSessionKHR>( videoSession ),
 9535  pMemoryRequirementsCount,
 9536  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
 9537  }
 9538 
 9539 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: two-call enumeration into a std::vector, looping while
// the driver reports VK_INCOMPLETE. (Capture is missing line 9541.)
 9540  template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
 9542  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
 9543  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
 9544  {
 9545  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9546 
 9547  std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
 9548  uint32_t memoryRequirementsCount;
 9549  VkResult result;
 9550  do
 9551  {
// First call with nullptr retrieves only the element count.
 9552  result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
 9553  if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
 9554  {
 9555  memoryRequirements.resize( memoryRequirementsCount );
 9556  result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
 9557  static_cast<VkVideoSessionKHR>( videoSession ),
 9558  &memoryRequirementsCount,
 9559  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
 9560  }
 9561  } while ( result == VK_INCOMPLETE );
 9562  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
// Shrink if fewer elements were written than initially reported.
 9563  VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
 9564  if ( memoryRequirementsCount < memoryRequirements.size() )
 9565  {
 9566  memoryRequirements.resize( memoryRequirementsCount );
 9567  }
 9568  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
 9569  }
 9570 
// Allocator-parameterized variant of the enumeration above.
// NOTE(review): capture is missing lines 9574-9575 (B1 / enable_if constraint).
 9571  template <typename VideoSessionMemoryRequirementsKHRAllocator,
 9572  typename Dispatch,
 9573  typename B1,
 9576  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
 9577  Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9578  VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
 9579  Dispatch const & d ) const
 9580  {
 9581  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9582 
 9583  std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
 9584  videoSessionMemoryRequirementsKHRAllocator );
 9585  uint32_t memoryRequirementsCount;
 9586  VkResult result;
 9587  do
 9588  {
 9589  result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
 9590  if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
 9591  {
 9592  memoryRequirements.resize( memoryRequirementsCount );
 9593  result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
 9594  static_cast<VkVideoSessionKHR>( videoSession ),
 9595  &memoryRequirementsCount,
 9596  reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
 9597  }
 9598  } while ( result == VK_INCOMPLETE );
 9599  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
 9600  VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
 9601  if ( memoryRequirementsCount < memoryRequirements.size() )
 9602  {
 9603  memoryRequirements.resize( memoryRequirementsCount );
 9604  }
 9605  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
 9606  }
 9607 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9608 
// bindVideoSessionMemoryKHR — raw overload with explicit count + array pointer.
// (Capture is missing line 9610, the declaration's return-type line.)
 9609  template <typename Dispatch>
 9611  Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9612  uint32_t bindSessionMemoryInfoCount,
 9613  const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
 9614  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9615  {
 9616  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9617  return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
 9618  static_cast<VkVideoSessionKHR>( videoSession ),
 9619  bindSessionMemoryInfoCount,
 9620  reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
 9621  }
 9622 
 9623 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// bindVideoSessionMemoryKHR — enhanced overload; count/pointer come from the
// ArrayProxy argument. (Capture is missing lines 9625 and 9627.)
 9624  template <typename Dispatch>
 9626  VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
 9628  Dispatch const & d ) const
 9629  {
 9630  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9631 
 9632  VkResult result = d.vkBindVideoSessionMemoryKHR( m_device,
 9633  static_cast<VkVideoSessionKHR>( videoSession ),
 9634  bindSessionMemoryInfos.size(),
 9635  reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
 9636  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
 9637 
 9638  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 9639  }
 9640 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9641 
// --- VkVideoSessionParametersKHR create / update / destroy (VK_KHR_video_queue) ---
//
// Raw creation overload, mirroring the C API with out-parameter semantics.
// (Capture is missing line 9643, the declaration's return-type line.)
 9642  template <typename Dispatch>
 9644  Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
 9645  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9646  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
 9647  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9648  {
 9649  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9650  return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
 9651  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
 9652  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 9653  reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
 9654  }
 9655 
 9656 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced creation overload: checks the result and returns the handle by
// value. (Capture is missing line 9658, the declaration's return-type line.)
 9657  template <typename Dispatch>
 9659  Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
 9660  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9661  Dispatch const & d ) const
 9662  {
 9663  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9664 
 9665  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
 9666  VkResult result = d.vkCreateVideoSessionParametersKHR(
 9667  m_device,
 9668  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
 9669  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 9670  reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
 9671  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
 9672 
 9673  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );
 9674  }
 9675 
 9676 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique overload: wraps the new handle in a UniqueHandle whose deleter
// destroys it with the same allocator and dispatcher.
 9677  template <typename Dispatch>
 9678  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
 9679  Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
 9680  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9681  Dispatch const & d ) const
 9682  {
 9683  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9684 
 9685  VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
 9686  VkResult result = d.vkCreateVideoSessionParametersKHR(
 9687  m_device,
 9688  reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
 9689  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 9690  reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
 9691  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
 9692 
 9693  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 9694  UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
 9695  videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
 9696  }
 9697 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 9698 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9699 
// updateVideoSessionParametersKHR — raw overload.
// (Capture is missing line 9701, the declaration's return-type line.)
 9700  template <typename Dispatch>
 9702  Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9703  const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
 9704  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9705  {
 9706  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9707  return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
 9708  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
 9709  reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
 9710  }
 9711 
 9712 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// updateVideoSessionParametersKHR — enhanced overload with result checking.
// (Capture is missing line 9714, the declaration's return-type line.)
 9713  template <typename Dispatch>
 9715  Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9716  const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
 9717  Dispatch const & d ) const
 9718  {
 9719  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9720 
 9721  VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device,
 9722  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
 9723  reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
 9724  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
 9725 
 9726  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
 9727  }
 9728 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9729 
// destroyVideoSessionParametersKHR — raw pointer-allocator overload.
 9730  template <typename Dispatch>
 9731  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9732  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9733  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9734  {
 9735  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9736  d.vkDestroyVideoSessionParametersKHR(
 9737  m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 9738  }
 9739 
 9740 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// destroyVideoSessionParametersKHR — enhanced Optional-allocator overload.
 9741  template <typename Dispatch>
 9742  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9743  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9744  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9745  {
 9746  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9747 
 9748  d.vkDestroyVideoSessionParametersKHR(
 9749  m_device,
 9750  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
 9751  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 9752  }
 9753 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9754 
// Device::destroy overloads — generic-name aliases forwarding to
// vkDestroyVideoSessionParametersKHR (used by UniqueHandle deleters).
 9755  template <typename Dispatch>
 9756  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9757  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 9758  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9759  {
 9760  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9761  d.vkDestroyVideoSessionParametersKHR(
 9762  m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
 9763  }
 9764 
 9765 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 9766  template <typename Dispatch>
 9767  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
 9768  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
 9769  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9770  {
 9771  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9772 
 9773  d.vkDestroyVideoSessionParametersKHR(
 9774  m_device,
 9775  static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
 9776  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
 9777  }
 9778 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9779 
// --- Video coding command-buffer commands (VK_KHR_video_queue /
// VK_KHR_video_decode_queue, beta extensions) ---
// Each command has a raw pointer overload and an enhanced reference overload;
// both are thin pass-throughs to the corresponding vkCmd* entry point.
//
// beginVideoCodingKHR — raw overload.
 9780  template <typename Dispatch>
 9781  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
 9782  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9783  {
 9784  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9785  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
 9786  }
 9787 
 9788 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// beginVideoCodingKHR — enhanced (reference) overload.
 9789  template <typename Dispatch>
 9790  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
 9791  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9792  {
 9793  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9794 
 9795  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
 9796  }
 9797 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9798 
// endVideoCodingKHR — raw overload.
 9799  template <typename Dispatch>
 9800  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
 9801  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9802  {
 9803  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9804  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
 9805  }
 9806 
 9807 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// endVideoCodingKHR — enhanced (reference) overload.
 9808  template <typename Dispatch>
 9809  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
 9810  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9811  {
 9812  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9813 
 9814  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
 9815  }
 9816 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9817 
// controlVideoCodingKHR — raw overload.
 9818  template <typename Dispatch>
 9819  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
 9820  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9821  {
 9822  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9823  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
 9824  }
 9825 
 9826 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// controlVideoCodingKHR — enhanced (reference) overload.
 9827  template <typename Dispatch>
 9828  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
 9829  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9830  {
 9831  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9832 
 9833  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
 9834  }
 9835 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9836 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
 9837 
 9838 #if defined( VK_ENABLE_BETA_EXTENSIONS )
 9839  //=== VK_KHR_video_decode_queue ===
 9840 
// decodeVideoKHR — raw overload.
 9841  template <typename Dispatch>
 9842  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
 9843  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9844  {
 9845  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9846  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
 9847  }
 9848 
 9849 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// decodeVideoKHR — enhanced (reference) overload.
 9850  template <typename Dispatch>
 9851  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
 9852  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9853  {
 9854  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9855 
 9856  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
 9857  }
 9858 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9859 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
9860 
 9861  //=== VK_EXT_transform_feedback ===
 9862 
// bindTransformFeedbackBuffersEXT — raw overload with parallel C arrays.
// (Capture is missing line 9864, the declaration line naming the function.)
 9863  template <typename Dispatch>
 9865  uint32_t bindingCount,
 9866  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
 9867  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
 9868  const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
 9869  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9870  {
 9871  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9872  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
 9873  firstBinding,
 9874  bindingCount,
 9875  reinterpret_cast<const VkBuffer *>( pBuffers ),
 9876  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
 9877  reinterpret_cast<const VkDeviceSize *>( pSizes ) );
 9878  }
 9879 
 9880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced ArrayProxy overload: validates that buffers/offsets (and sizes, if
// non-empty) have matching lengths — via assert when exceptions are disabled,
// otherwise by throwing LogicError. (Capture is missing lines 9883-9886,
// the declaration and ArrayProxy parameter lines.)
 9881  template <typename Dispatch>
 9882  VULKAN_HPP_INLINE void
 9887  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
 9888  {
 9889  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9890 # ifdef VULKAN_HPP_NO_EXCEPTIONS
 9891  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
 9892  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
 9893 # else
 9894  if ( buffers.size() != offsets.size() )
 9895  {
 9896  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
 9897  }
 9898  if ( !sizes.empty() && buffers.size() != sizes.size() )
 9899  {
 9900  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
 9901  }
 9902 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
 9903 
 9904  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
 9905  firstBinding,
 9906  buffers.size(),
 9907  reinterpret_cast<const VkBuffer *>( buffers.data() ),
 9908  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
 9909  reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
 9910  }
 9911 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9912 
// beginTransformFeedbackEXT — raw overload. (Capture is missing line 9914.)
 9913  template <typename Dispatch>
 9915  uint32_t counterBufferCount,
 9916  const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
 9917  const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
 9918  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9919  {
 9920  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9921  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
 9922  firstCounterBuffer,
 9923  counterBufferCount,
 9924  reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
 9925  reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
 9926  }
 9927 
 9928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// beginTransformFeedbackEXT — enhanced ArrayProxy overload; offsets may be
// empty, otherwise the lengths must match. (Capture is missing 9932-9933.)
 9929  template <typename Dispatch>
 9930  VULKAN_HPP_INLINE void
 9931  CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
 9934  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
 9935  {
 9936  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9937 # ifdef VULKAN_HPP_NO_EXCEPTIONS
 9938  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
 9939 # else
 9940  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
 9941  {
 9942  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
 9943  }
 9944 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
 9945 
 9946  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
 9947  firstCounterBuffer,
 9948  counterBuffers.size(),
 9949  reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
 9950  reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
 9951  }
 9952 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 9953 
// endTransformFeedbackEXT — raw overload. (Capture is missing line 9955.)
 9954  template <typename Dispatch>
 9956  uint32_t counterBufferCount,
 9957  const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
 9958  const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
 9959  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 9960  {
 9961  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9962  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
 9963  firstCounterBuffer,
 9964  counterBufferCount,
 9965  reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
 9966  reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
 9967  }
 9968 
 9969 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// endTransformFeedbackEXT — enhanced ArrayProxy overload, same length
// validation as the begin variant. (Capture is missing lines 9973-9974.)
 9970  template <typename Dispatch>
 9971  VULKAN_HPP_INLINE void
 9972  CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
 9975  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
 9976  {
 9977  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 9978 # ifdef VULKAN_HPP_NO_EXCEPTIONS
 9979  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
 9980 # else
 9981  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
 9982  {
 9983  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
 9984  }
 9985 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
 9986 
 9987  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
 9988  firstCounterBuffer,
 9989  counterBuffers.size(),
 9990  reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
 9991  reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
 9992  }
 9993 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9994 
// beginQueryIndexedEXT — starts an indexed query (VK_EXT_transform_feedback).
// NOTE(review): capture is missing original lines 9996 and 9998 (declaration
// line and the flags parameter line); tokens below are byte-identical.
 9995  template <typename Dispatch>
 9997  uint32_t query,
 9999  uint32_t index,
 10000  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 10001  {
 10002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10003  d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
 10004  }
 10005 
// endQueryIndexedEXT — ends the indexed query begun above.
 10006  template <typename Dispatch>
 10007  VULKAN_HPP_INLINE void
 10008  CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 10009  {
 10010  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10011  d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
 10012  }
 10013 
// drawIndirectByteCountEXT — draw with vertex count sourced from a counter
// buffer. (Capture is missing line 10015, the declaration line.)
 10014  template <typename Dispatch>
 10016  uint32_t firstInstance,
 10017  VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
 10018  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
 10019  uint32_t counterOffset,
 10020  uint32_t vertexStride,
 10021  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 10022  {
 10023  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10024  d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
 10025  instanceCount,
 10026  firstInstance,
 10027  static_cast<VkBuffer>( counterBuffer ),
 10028  static_cast<VkDeviceSize>( counterBufferOffset ),
 10029  counterOffset,
 10030  vertexStride );
 10031  }
10032 
 10033  //=== VK_NVX_binary_import ===
 10034 
// createCuModuleNVX — raw overload. (Capture is missing lines 10036 and
// 10038, the declaration and out-parameter lines; tokens are byte-identical.)
 10035  template <typename Dispatch>
 10037  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 10039  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 10040  {
 10041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10042  return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
 10043  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
 10044  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 10045  reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
 10046  }
 10047 
 10048 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// createCuModuleNVX — enhanced overload; checks the result and returns the
// module handle. (Capture is missing lines 10050-10052 and 10057.)
 10049  template <typename Dispatch>
 10053  Dispatch const & d ) const
 10054  {
 10055  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10056 
 10058  VkResult result =
 10059  d.vkCreateCuModuleNVX( m_device,
 10060  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
 10061  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 10062  reinterpret_cast<VkCuModuleNVX *>( &module ) );
 10063  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
 10064 
 10065  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
 10066  }
 10067 
 10068 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// createCuModuleNVXUnique — as above but returns a UniqueHandle.
// (Capture is missing lines 10070-10072, 10077, and part of 10086.)
 10069  template <typename Dispatch>
 10073  Dispatch const & d ) const
 10074  {
 10075  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10076 
 10078  VkResult result =
 10079  d.vkCreateCuModuleNVX( m_device,
 10080  reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
 10081  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
 10082  reinterpret_cast<VkCuModuleNVX *>( &module ) );
 10083  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
 10084 
 10085  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
 10087  }
 10088 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
 10089 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 10090 
// createCuFunctionNVX — raw overload. (Capture is missing lines 10092 and
// 10094, the declaration and out-parameter lines.)
 10091  template <typename Dispatch>
 10093  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
 10095  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
 10096  {
 10097  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
 10098  return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
 10099  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
 10100  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
 10101  reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
 10102  }
10103 
10104 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10105  template <typename Dispatch>
10109  Dispatch const & d ) const
10110  {
10111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10112 
10114  VkResult result =
10115  d.vkCreateCuFunctionNVX( m_device,
10116  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
10117  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10118  reinterpret_cast<VkCuFunctionNVX *>( &function ) );
10119  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
10120 
10121  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
10122  }
10123 
10124 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10125  template <typename Dispatch>
10129  Dispatch const & d ) const
10130  {
10131  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10132 
10134  VkResult result =
10135  d.vkCreateCuFunctionNVX( m_device,
10136  reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
10137  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10138  reinterpret_cast<VkCuFunctionNVX *>( &function ) );
10139  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
10140 
10141  return createResultValueType(
10142  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
10144  }
10145 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10146 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10147 
10148  template <typename Dispatch>
10150  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10151  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10152  {
10153  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10154  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10155  }
10156 
10157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10158  template <typename Dispatch>
10161  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10162  {
10163  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10164 
10165  d.vkDestroyCuModuleNVX( m_device,
10166  static_cast<VkCuModuleNVX>( module ),
10167  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10168  }
10169 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10170 
10171  template <typename Dispatch>
10173  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10174  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10175  {
10176  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10177  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10178  }
10179 
10180 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10181  template <typename Dispatch>
10184  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10185  {
10186  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10187 
10188  d.vkDestroyCuModuleNVX( m_device,
10189  static_cast<VkCuModuleNVX>( module ),
10190  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10191  }
10192 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10193 
10194  template <typename Dispatch>
10196  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10197  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10198  {
10199  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10200  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10201  }
10202 
10203 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10204  template <typename Dispatch>
10207  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10208  {
10209  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10210 
10211  d.vkDestroyCuFunctionNVX( m_device,
10212  static_cast<VkCuFunctionNVX>( function ),
10213  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10214  }
10215 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10216 
10217  template <typename Dispatch>
10219  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10220  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10221  {
10222  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10223  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10224  }
10225 
10226 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10227  template <typename Dispatch>
10230  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10231  {
10232  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10233 
10234  d.vkDestroyCuFunctionNVX( m_device,
10235  static_cast<VkCuFunctionNVX>( function ),
10236  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10237  }
10238 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10239 
10240  template <typename Dispatch>
10242  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10243  {
10244  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10245  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
10246  }
10247 
10248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10249  template <typename Dispatch>
10251  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10252  {
10253  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10254 
10255  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
10256  }
10257 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10258 
10259  //=== VK_NVX_image_view_handle ===
10260 
10261  template <typename Dispatch>
10263  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10264  {
10265  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10266  return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
10267  }
10268 
10269 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10270  template <typename Dispatch>
10272  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10273  {
10274  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10275 
10276  uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
10277 
10278  return result;
10279  }
10280 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10281 
10282  template <typename Dispatch>
10285  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10286  {
10287  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10288  return static_cast<Result>(
10289  d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
10290  }
10291 
10292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10293  template <typename Dispatch>
10296  {
10297  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10298 
10300  VkResult result =
10301  d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
10302  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
10303 
10304  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
10305  }
10306 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10307 
10308  //=== VK_AMD_draw_indirect_count ===
10309 
10310  template <typename Dispatch>
10313  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
10314  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
10315  uint32_t maxDrawCount,
10316  uint32_t stride,
10317  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10318  {
10319  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10320  d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
10321  static_cast<VkBuffer>( buffer ),
10322  static_cast<VkDeviceSize>( offset ),
10323  static_cast<VkBuffer>( countBuffer ),
10324  static_cast<VkDeviceSize>( countBufferOffset ),
10325  maxDrawCount,
10326  stride );
10327  }
10328 
10329  template <typename Dispatch>
10332  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
10333  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
10334  uint32_t maxDrawCount,
10335  uint32_t stride,
10336  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10337  {
10338  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10339  d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
10340  static_cast<VkBuffer>( buffer ),
10341  static_cast<VkDeviceSize>( offset ),
10342  static_cast<VkBuffer>( countBuffer ),
10343  static_cast<VkDeviceSize>( countBufferOffset ),
10344  maxDrawCount,
10345  stride );
10346  }
10347 
10348  //=== VK_AMD_shader_info ===
10349 
10350  template <typename Dispatch>
10354  size_t * pInfoSize,
10355  void * pInfo,
10356  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10357  {
10358  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10359  return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
10360  static_cast<VkPipeline>( pipeline ),
10361  static_cast<VkShaderStageFlagBits>( shaderStage ),
10362  static_cast<VkShaderInfoTypeAMD>( infoType ),
10363  pInfoSize,
10364  pInfo ) );
10365  }
10366 
10367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10368  template <typename Uint8_tAllocator, typename Dispatch>
10373  Dispatch const & d ) const
10374  {
10375  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10376 
10377  std::vector<uint8_t, Uint8_tAllocator> info;
10378  size_t infoSize;
10379  VkResult result;
10380  do
10381  {
10382  result = d.vkGetShaderInfoAMD( m_device,
10383  static_cast<VkPipeline>( pipeline ),
10384  static_cast<VkShaderStageFlagBits>( shaderStage ),
10385  static_cast<VkShaderInfoTypeAMD>( infoType ),
10386  &infoSize,
10387  nullptr );
10388  if ( ( result == VK_SUCCESS ) && infoSize )
10389  {
10390  info.resize( infoSize );
10391  result = d.vkGetShaderInfoAMD( m_device,
10392  static_cast<VkPipeline>( pipeline ),
10393  static_cast<VkShaderStageFlagBits>( shaderStage ),
10394  static_cast<VkShaderInfoTypeAMD>( infoType ),
10395  &infoSize,
10396  reinterpret_cast<void *>( info.data() ) );
10397  }
10398  } while ( result == VK_INCOMPLETE );
10399  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
10400  VULKAN_HPP_ASSERT( infoSize <= info.size() );
10401  if ( infoSize < info.size() )
10402  {
10403  info.resize( infoSize );
10404  }
10405  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
10406  }
10407 
10408  template <typename Uint8_tAllocator,
10409  typename Dispatch,
10410  typename B1,
10416  Uint8_tAllocator & uint8_tAllocator,
10417  Dispatch const & d ) const
10418  {
10419  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10420 
10421  std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
10422  size_t infoSize;
10423  VkResult result;
10424  do
10425  {
10426  result = d.vkGetShaderInfoAMD( m_device,
10427  static_cast<VkPipeline>( pipeline ),
10428  static_cast<VkShaderStageFlagBits>( shaderStage ),
10429  static_cast<VkShaderInfoTypeAMD>( infoType ),
10430  &infoSize,
10431  nullptr );
10432  if ( ( result == VK_SUCCESS ) && infoSize )
10433  {
10434  info.resize( infoSize );
10435  result = d.vkGetShaderInfoAMD( m_device,
10436  static_cast<VkPipeline>( pipeline ),
10437  static_cast<VkShaderStageFlagBits>( shaderStage ),
10438  static_cast<VkShaderInfoTypeAMD>( infoType ),
10439  &infoSize,
10440  reinterpret_cast<void *>( info.data() ) );
10441  }
10442  } while ( result == VK_INCOMPLETE );
10443  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
10444  VULKAN_HPP_ASSERT( infoSize <= info.size() );
10445  if ( infoSize < info.size() )
10446  {
10447  info.resize( infoSize );
10448  }
10449  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
10450  }
10451 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10452 
10453  //=== VK_KHR_dynamic_rendering ===
10454 
10455  template <typename Dispatch>
10457  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10458  {
10459  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10460  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
10461  }
10462 
10463 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10464  template <typename Dispatch>
10466  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10467  {
10468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10469 
10470  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
10471  }
10472 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10473 
10474  template <typename Dispatch>
10476  {
10477  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10478  d.vkCmdEndRenderingKHR( m_commandBuffer );
10479  }
10480 
10481 #if defined( VK_USE_PLATFORM_GGP )
10482  //=== VK_GGP_stream_descriptor_surface ===
10483 
10484  template <typename Dispatch>
10486  Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
10487  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
10489  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10490  {
10491  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10492  return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
10493  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
10494  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10495  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
10496  }
10497 
10498 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10499  template <typename Dispatch>
10501  Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
10502  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10503  Dispatch const & d ) const
10504  {
10505  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10506 
10508  VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
10509  m_instance,
10510  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
10511  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10512  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
10513  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
10514 
10515  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
10516  }
10517 
10518 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10519  template <typename Dispatch>
10520  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
10521  Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
10522  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10523  Dispatch const & d ) const
10524  {
10525  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10526 
10528  VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
10529  m_instance,
10530  reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
10531  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10532  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
10533  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
10534 
10535  return createResultValueType(
10536  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
10537  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
10538  }
10539 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
10540 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10541 #endif /*VK_USE_PLATFORM_GGP*/
10542 
10543  //=== VK_NV_external_memory_capabilities ===
10544 
10545  template <typename Dispatch>
10553  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
10554  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10555  {
10556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10557  return static_cast<Result>(
10558  d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
10559  static_cast<VkFormat>( format ),
10560  static_cast<VkImageType>( type ),
10561  static_cast<VkImageTiling>( tiling ),
10562  static_cast<VkImageUsageFlags>( usage ),
10563  static_cast<VkImageCreateFlags>( flags ),
10564  static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
10565  reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
10566  }
10567 
10568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10569  template <typename Dispatch>
10577  Dispatch const & d ) const
10578  {
10579  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10580 
10581  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
10582  VkResult result =
10583  d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
10584  static_cast<VkFormat>( format ),
10585  static_cast<VkImageType>( type ),
10586  static_cast<VkImageTiling>( tiling ),
10587  static_cast<VkImageUsageFlags>( usage ),
10588  static_cast<VkImageCreateFlags>( flags ),
10589  static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
10590  reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
10591  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
10592 
10593  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
10594  }
10595 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10596 
10597 #if defined( VK_USE_PLATFORM_WIN32_KHR )
10598  //=== VK_NV_external_memory_win32 ===
10599 
10600  template <typename Dispatch>
10603  HANDLE * pHandle,
10604  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10605  {
10606  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10607  return static_cast<Result>(
10608  d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
10609  }
10610 
10611 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10612  template <typename Dispatch>
10613  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
10615  {
10616  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10617 
10618  HANDLE handle;
10619  VkResult result =
10620  d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
10621  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
10622 
10623  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
10624  }
10625 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10626 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
10627 
10628  //=== VK_KHR_get_physical_device_properties2 ===
10629 
10630  template <typename Dispatch>
10632  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10633  {
10634  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10635  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
10636  }
10637 
10638 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10639  template <typename Dispatch>
10642  {
10643  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10644 
10646  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
10647 
10648  return features;
10649  }
10650 
10651  template <typename X, typename Y, typename... Z, typename Dispatch>
10653  {
10654  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10655 
10656  StructureChain<X, Y, Z...> structureChain;
10657  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
10658  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
10659 
10660  return structureChain;
10661  }
10662 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10663 
10664  template <typename Dispatch>
10666  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10667  {
10668  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10669  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
10670  }
10671 
10672 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10673  template <typename Dispatch>
10676  {
10677  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10678 
10680  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
10681 
10682  return properties;
10683  }
10684 
10685  template <typename X, typename Y, typename... Z, typename Dispatch>
10687  {
10688  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10689 
10690  StructureChain<X, Y, Z...> structureChain;
10691  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
10692  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
10693 
10694  return structureChain;
10695  }
10696 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10697 
10698  template <typename Dispatch>
10700  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
10701  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10702  {
10703  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10704  d.vkGetPhysicalDeviceFormatProperties2KHR(
10705  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
10706  }
10707 
10708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10709  template <typename Dispatch>
10712  {
10713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10714 
10715  VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
10716  d.vkGetPhysicalDeviceFormatProperties2KHR(
10717  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
10718 
10719  return formatProperties;
10720  }
10721 
10722  template <typename X, typename Y, typename... Z, typename Dispatch>
10724  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10725  {
10726  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10727 
10728  StructureChain<X, Y, Z...> structureChain;
10729  VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
10730  d.vkGetPhysicalDeviceFormatProperties2KHR(
10731  m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
10732 
10733  return structureChain;
10734  }
10735 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10736 
10737  template <typename Dispatch>
10740  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
10741  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10742  {
10743  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10744  return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10745  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
10746  reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
10747  }
10748 
10749 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10750  template <typename Dispatch>
10753  {
10754  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10755 
10756  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
10757  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10758  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
10759  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
10760  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
10761 
10762  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
10763  }
10764 
10765  template <typename X, typename Y, typename... Z, typename Dispatch>
10768  {
10769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10770 
10771  StructureChain<X, Y, Z...> structureChain;
10772  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
10773  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10774  reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
10775  reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
10776  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
10777 
10778  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
10779  }
10780 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10781 
10782  template <typename Dispatch>
10783  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
10784  VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
10785  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
10786  {
10787  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10788  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10789  m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
10790  }
10791 
10792 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10793  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
10794  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
10796  {
10797  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10798 
10799  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
10800  uint32_t queueFamilyPropertyCount;
10801  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
10802  queueFamilyProperties.resize( queueFamilyPropertyCount );
10803  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10804  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10805 
10806  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
10807  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10808  {
10809  queueFamilyProperties.resize( queueFamilyPropertyCount );
10810  }
10811  return queueFamilyProperties;
10812  }
10813 
10814  template <typename QueueFamilyProperties2Allocator,
10815  typename Dispatch,
10816  typename B1,
10818  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
10819  PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
10820  {
10821  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
10822 
10823  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
10824  uint32_t queueFamilyPropertyCount;
10825  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
10826  queueFamilyProperties.resize( queueFamilyPropertyCount );
10827  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10828  m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10829 
10830  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
10831  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10832  {
10833  queueFamilyProperties.resize( queueFamilyPropertyCount );
10834  }
10835  return queueFamilyProperties;
10836  }
10837 
  // Enhanced overload returning one StructureChain per queue family so callers can
  // query extension structures chained behind QueueFamilyProperties2.
  // NOTE(review): the function-signature line (name + parameter list) is missing from
  // this extraction; the remaining code is left byte-identical.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    // Wire each temporary struct's pNext to the chain's pNext so the driver fills
    // the extension structures owned by the StructureChain.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    // Copy the base structs back into their chains.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
10868 
  // StructureChain overload taking a caller-supplied allocator for the result vector.
  // NOTE(review): the enable_if constraint line after "typename B1," is missing from
  // this extraction — restore it from the generated header.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B1,
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    // Link each query struct's pNext into the caller-visible chain storage.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
10903 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10904 
  // C-style wrapper: forwards pMemoryProperties straight to vkGetPhysicalDeviceMemoryProperties2KHR.
  // NOTE(review): the signature line (return type, name and first parameter) is missing
  // from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }
10912 
10913 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning the memory properties by value.
  // NOTE(review): the signature line and the local `memoryProperties` declaration are
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return memoryProperties;
  }
10925 
  // StructureChain overload: fills the PhysicalDeviceMemoryProperties2 element of the
  // caller-specified chain (plus any extension structs chained behind it).
  // NOTE(review): the signature line and the `memoryProperties` reference binding are
  // missing from this extraction; code left byte-identical.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    StructureChain<X, Y, Z...> structureChain;
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return structureChain;
  }
10938 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
10939 
  // C-style wrapper around vkGetPhysicalDeviceSparseImageFormatProperties2KHR.
  // NOTE(review): the signature line with `pFormatInfo` and the `pProperties` parameter
  // line are missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                         uint32_t * pPropertyCount,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
                                                          pPropertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }
10952 
10953 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning a vector of SparseImageFormatProperties2 via the
  // two-call (count then fetch) idiom.
  // NOTE(review): the line carrying the function name and `formatInfo` parameter is
  // missing from this extraction; code left byte-identical.
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t propertyCount;
    // First call with nullptr queries only the count.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
10978 
  // Allocator-taking overload of the enhanced sparse-image-format query.
  // NOTE(review): the enable_if constraint line and the function-name/`formatInfo` line
  // are missing from this extraction; code left byte-identical.
  template <typename SparseImageFormatProperties2Allocator,
            typename Dispatch,
            typename B1,
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                                                      SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;
    // Two-call idiom: query count, size vector, then fetch.
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
11007 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11008 
11009  //=== VK_KHR_device_group ===
11010 
  // C-style wrapper around vkGetDeviceGroupPeerMemoryFeaturesKHR.
  // NOTE(review): the signature line carrying the name and `heapIndex` parameter is
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                   uint32_t localDeviceIndex,
                                                                   uint32_t remoteDeviceIndex,
                                                                   VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }
11022 
11023 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning the peer-memory feature flags by value.
  // NOTE(review): the return-type/name line and the local `peerMemoryFeatures`
  // declaration are missing from this extraction; code left byte-identical.
  template <typename Dispatch>
    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );

    return peerMemoryFeatures;
  }
11036 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11037 
11038  template <typename Dispatch>
11039  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11040  {
11041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11042  d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
11043  }
11044 
  // Wrapper around vkCmdDispatchBaseKHR (dispatch with a non-zero base workgroup).
  // NOTE(review): the signature line carrying the name and `baseGroupX` parameter is
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                       uint32_t baseGroupY,
                                                       uint32_t baseGroupZ,
                                                       uint32_t groupCountX,
                                                       uint32_t groupCountY,
                                                       uint32_t groupCountZ,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
11057 
11058 #if defined( VK_USE_PLATFORM_VI_NN )
11059  //=== VK_NN_vi_surface ===
11060 
  // C-style wrapper around vkCreateViSurfaceNN (Nintendo Vi surface).
  // NOTE(review): the `pSurface` parameter line is missing from this extraction;
  // code left byte-identical.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
                                                       reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
11073 
11074 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: creates a Vi surface, checks the result, and returns the handle.
  // NOTE(review): the return-type line and the local `surface` declaration are missing
  // from this extraction; code left byte-identical.
  template <typename Dispatch>
    Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    // Throws (or asserts, per configuration) on a non-success result.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
11093 
11094 # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created surface in a UniqueHandle that destroys it
  // (with the same allocator) when it goes out of scope.
  // NOTE(review): the local `surface` declaration line is missing from this extraction;
  // code left byte-identical.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );

    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
11115 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11116 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11117 #endif /*VK_USE_PLATFORM_VI_NN*/
11118 
11119  //=== VK_KHR_maintenance1 ===
11120 
  // Wrapper around vkTrimCommandPoolKHR (VK_KHR_maintenance1).
  // NOTE(review): the signature lines carrying the name and the `commandPool`/`flags`
  // parameters are missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }
11129 
11130  //=== VK_KHR_device_group_creation ===
11131 
  // C-style wrapper around vkEnumeratePhysicalDeviceGroupsKHR.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount,
                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
      m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }
11142 
11143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: enumerates device groups, retrying while the implementation
  // returns VK_INCOMPLETE (count can change between the two calls).
  // NOTE(review): the return-type and signature lines are missing from this
  // extraction; code left byte-identical.
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t physicalDeviceGroupCount;
    VkResult result;
    do
    {
      // Query the count, then fetch; loop if the set changed mid-enumeration.
      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }
11172 
  // Allocator-taking overload of the device-group enumeration; same VK_INCOMPLETE
  // retry loop as the overload above.
  // NOTE(review): the enable_if constraint line and the return-type line are missing
  // from this extraction; code left byte-identical.
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename B1,
    Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }
11205 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11206 
11207  //=== VK_KHR_external_memory_capabilities ===
11208 
  // C-style wrapper around vkGetPhysicalDeviceExternalBufferPropertiesKHR.
  // NOTE(review): the signature line carrying the name and `pExternalBufferInfo` is
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                            VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
                                                      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
11219 
11220 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning the external-buffer properties by value.
  // NOTE(review): the return-type/name line is missing from this extraction; code
  // left byte-identical.
  template <typename Dispatch>
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
                                                      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );

    return externalBufferProperties;
  }
11235 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11236 
11237 #if defined( VK_USE_PLATFORM_WIN32_KHR )
11238  //=== VK_KHR_external_memory_win32 ===
11239 
11240  template <typename Dispatch>
11241  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
11242  HANDLE * pHandle,
11243  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11244  {
11245  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11246  return static_cast<Result>(
11247  d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
11248  }
11249 
11250 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: exports the Win32 handle, checks the result, returns the HANDLE.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE handle;
    VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
11263 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11264 
  // C-style wrapper around vkGetMemoryWin32HandlePropertiesKHR.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               HANDLE handle,
                                               VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                       handle,
                                                                       reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  }
11278 
11279 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: queries properties of an imported Win32 handle and returns them
  // by value after result checking.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
    VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                                             static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                             handle,
                                                             reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
  }
11295 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11296 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
11297 
11298  //=== VK_KHR_external_memory_fd ===
11299 
  // C-style wrapper around vkGetMemoryFdKHR (export a POSIX fd for device memory).
  // NOTE(review): the signature line carrying the name and `pGetFdInfo` is missing
  // from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                      int * pFd,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }
11308 
11309 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: exports the fd, checks the result, and returns it.
  // NOTE(review): the return-type/name line is missing from this extraction; code
  // left byte-identical.
  template <typename Dispatch>
                                                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    int fd;
    VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
11322 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11323 
  // C-style wrapper around vkGetMemoryFdPropertiesKHR.
  // NOTE(review): the signature line carrying the name and `handleType` parameter is
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                                int fd,
                                                                                VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  }
11334 
11335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning MemoryFdPropertiesKHR by value after result checking.
  // NOTE(review): the return-type/signature lines and the local `memoryFdProperties`
  // declaration are missing from this extraction; code left byte-identical.
  template <typename Dispatch>
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetMemoryFdPropertiesKHR(
      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
  }
11349 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11350 
11351  //=== VK_KHR_external_semaphore_capabilities ===
11352 
  // C-style wrapper around vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.
  // NOTE(review): the signature line carrying the name and `pExternalSemaphoreInfo`
  // is missing from this extraction; code left byte-identical.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
                                                                  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
11364 
11365 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper returning the external-semaphore properties by value.
  // NOTE(review): the return-type/name lines are missing from this extraction; code
  // left byte-identical.
  template <typename Dispatch>
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );

    return externalSemaphoreProperties;
  }
11380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11381 
11382 #if defined( VK_USE_PLATFORM_WIN32_KHR )
11383  //=== VK_KHR_external_semaphore_win32 ===
11384 
11385  template <typename Dispatch>
11386  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
11387  const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11388  {
11389  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11390  return static_cast<Result>(
11391  d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
11392  }
11393 
11394 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: imports the semaphore handle and throws/asserts on failure.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
11408 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11409 
11410  template <typename Dispatch>
11411  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
11412  const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11413  {
11414  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11415  return static_cast<Result>(
11416  d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
11417  }
11418 
11419 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: exports the semaphore handle, checks the result, returns the HANDLE.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE handle;
    VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
11432 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11433 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
11434 
11435  //=== VK_KHR_external_semaphore_fd ===
11436 
  // C-style wrapper around vkImportSemaphoreFdKHR.
  // NOTE(review): the signature line carrying the name and `pImportSemaphoreFdInfo`
  // is missing from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  }
11444 
11445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: imports a semaphore payload from a POSIX fd, result-checked.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
11457 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11458 
  // C-style wrapper around vkGetSemaphoreFdKHR (export a POSIX fd for a semaphore).
  // NOTE(review): the signature line carrying the name and `pGetFdInfo` is missing
  // from this extraction; code left byte-identical.
  template <typename Dispatch>
                                                                      int * pFd,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }
11467 
11468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: exports the semaphore fd, checks the result, and returns it.
  // NOTE(review): the return-type line is missing from this extraction; code left
  // byte-identical.
  template <typename Dispatch>
    Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    int fd;
    VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
11481 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11482 
11483  //=== VK_KHR_push_descriptor ===
11484 
  // C-style wrapper around vkCmdPushDescriptorSetKHR (VK_KHR_push_descriptor).
  // NOTE(review): the signature lines carrying the name and the
  // `pipelineBindPoint`/`layout` parameters are missing from this extraction; code
  // left byte-identical.
  template <typename Dispatch>
                                                          uint32_t set,
                                                          uint32_t descriptorWriteCount,
                                                          const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWriteCount,
                                 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  }
11501 
11502 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper taking an ArrayProxy of WriteDescriptorSet; size/data come from
  // the proxy.
  // NOTE(review): the signature lines carrying the name, the
  // `pipelineBindPoint`/`layout` parameters and the `descriptorWrites` proxy are
  // missing from this extraction; code left byte-identical.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
                                            uint32_t set,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWrites.size(),
                                 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  }
11520 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11521 
  // C-style wrapper around vkCmdPushDescriptorSetWithTemplateKHR.
  // NOTE(review): the signature lines carrying the name and the
  // `descriptorUpdateTemplate`/`layout` parameters are missing from this extraction;
  // code left byte-identical.
  template <typename Dispatch>
                                                                      uint32_t set,
                                                                      const void * pData,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetWithTemplateKHR(
      m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  }
11533 
11534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: passes the address of a typed `data` value as the template's
  // raw payload.
  // NOTE(review): the signature lines carrying the name and the
  // `descriptorUpdateTemplate`/`layout` parameters are missing from this extraction;
  // code left byte-identical.
  template <typename DataType, typename Dispatch>
                                                                      uint32_t set,
                                                                      DataType const & data,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                             static_cast<VkPipelineLayout>( layout ),
                                             set,
                                             reinterpret_cast<const void *>( &data ) );
  }
11550 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11551 
11552  //=== VK_EXT_conditional_rendering ===
11553 
11554  template <typename Dispatch>
11556  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11557  {
11558  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11559  d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
11560  }
11561 
11562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11563  template <typename Dispatch>
11565  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11566  {
11567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11568 
11569  d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
11570  }
11571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11572 
11573  template <typename Dispatch>
11575  {
11576  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11577  d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
11578  }
11579 
11580  //=== VK_KHR_descriptor_update_template ===
11581 
11582  template <typename Dispatch>
11585  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11586  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
11587  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11588  {
11589  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11590  return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
11591  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
11592  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11593  reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
11594  }
11595 
11596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11597  template <typename Dispatch>
11601  Dispatch const & d ) const
11602  {
11603  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11604 
11605  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
11606  VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
11607  m_device,
11608  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
11609  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11610  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
11611  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
11612 
11613  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
11614  }
11615 
11616 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11617  template <typename Dispatch>
11621  Dispatch const & d ) const
11622  {
11623  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11624 
11625  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
11626  VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
11627  m_device,
11628  reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
11629  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11630  reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
11631  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
11632 
11633  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
11635  descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
11636  }
11637 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11638 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11639 
11640  template <typename Dispatch>
11642  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11643  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11644  {
11645  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11646  d.vkDestroyDescriptorUpdateTemplateKHR(
11647  m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11648  }
11649 
11650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11651  template <typename Dispatch>
11654  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11655  {
11656  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11657 
11658  d.vkDestroyDescriptorUpdateTemplateKHR(
11659  m_device,
11660  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
11661  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
11662  }
11663 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11664 
11665  template <typename Dispatch>
11667  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
11668  const void * pData,
11669  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11670  {
11671  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11672  d.vkUpdateDescriptorSetWithTemplateKHR(
11673  m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
11674  }
11675 
11676 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11677  template <typename DataType, typename Dispatch>
11679  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
11680  DataType const & data,
11681  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11682  {
11683  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11684 
11685  d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
11686  static_cast<VkDescriptorSet>( descriptorSet ),
11687  static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
11688  reinterpret_cast<const void *>( &data ) );
11689  }
11690 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11691 
11692  //=== VK_NV_clip_space_w_scaling ===
11693 
11694  template <typename Dispatch>
11696  uint32_t viewportCount,
11697  const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
11698  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11699  {
11700  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11701  d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
11702  }
11703 
11704 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11705  template <typename Dispatch>
11706  VULKAN_HPP_INLINE void
11707  CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
11709  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11710  {
11711  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11712 
11713  d.vkCmdSetViewportWScalingNV(
11714  m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
11715  }
11716 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11717 
11718  //=== VK_EXT_direct_mode_display ===
11719 
11720 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
11721  template <typename Dispatch>
11723  {
11724  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11725  return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
11726  }
11727 #else
11728  template <typename Dispatch>
11730  {
11731  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11732 
11733  d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
11734  }
11735 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
11736 
11737 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
11738  //=== VK_EXT_acquire_xlib_display ===
11739 
11740  template <typename Dispatch>
11741  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy,
11743  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11744  {
11745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11746  return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
11747  }
11748 
11749 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11750  template <typename Dispatch>
11752  PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
11753  {
11754  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11755 
11756  VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
11757  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
11758 
11759  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
11760  }
11761 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11762 
11763  template <typename Dispatch>
11764  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
11765  RROutput rrOutput,
11767  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11768  {
11769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11770  return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
11771  }
11772 
11773 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11774  template <typename Dispatch>
11776  PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
11777  {
11778  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11779 
11781  VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
11782  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
11783 
11784  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
11785  }
11786 
11787 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11788  template <typename Dispatch>
11789  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
11790  PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
11791  {
11792  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11793 
11795  VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
11796  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
11797 
11798  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
11799  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
11800  }
11801 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11802 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11803 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
11804 
11805  //=== VK_EXT_display_surface_counter ===
11806 
11807  template <typename Dispatch>
11810  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
11811  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11812  {
11813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11814  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
11815  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
11816  }
11817 
11818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11819  template <typename Dispatch>
11822  {
11823  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11824 
11826  VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
11827  m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
11828  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
11829 
11830  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
11831  }
11832 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11833 
11834  //=== VK_EXT_display_control ===
11835 
11836  template <typename Dispatch>
11838  const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
11839  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11840  {
11841  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11842  return static_cast<Result>(
11843  d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
11844  }
11845 
11846 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11847  template <typename Dispatch>
11849  const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
11850  Dispatch const & d ) const
11851  {
11852  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11853 
11854  VkResult result =
11855  d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
11856  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
11857 
11858  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
11859  }
11860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11861 
11862  template <typename Dispatch>
11864  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11865  VULKAN_HPP_NAMESPACE::Fence * pFence,
11866  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11867  {
11868  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11869  return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
11870  reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
11871  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11872  reinterpret_cast<VkFence *>( pFence ) ) );
11873  }
11874 
11875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11876  template <typename Dispatch>
11880  Dispatch const & d ) const
11881  {
11882  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11883 
11885  VkResult result = d.vkRegisterDeviceEventEXT(
11886  m_device,
11887  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
11888  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11889  reinterpret_cast<VkFence *>( &fence ) );
11890  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
11891 
11892  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
11893  }
11894 
11895 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11896  template <typename Dispatch>
11900  Dispatch const & d ) const
11901  {
11902  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11903 
11905  VkResult result = d.vkRegisterDeviceEventEXT(
11906  m_device,
11907  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
11908  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11909  reinterpret_cast<VkFence *>( &fence ) );
11910  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
11911 
11912  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
11914  }
11915 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11916 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11917 
11918  template <typename Dispatch>
11920  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
11921  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
11922  VULKAN_HPP_NAMESPACE::Fence * pFence,
11923  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11924  {
11925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11926  return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
11927  static_cast<VkDisplayKHR>( display ),
11928  reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
11929  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11930  reinterpret_cast<VkFence *>( pFence ) ) );
11931  }
11932 
11933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11934  template <typename Dispatch>
11937  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
11939  Dispatch const & d ) const
11940  {
11941  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11942 
11944  VkResult result = d.vkRegisterDisplayEventEXT(
11945  m_device,
11946  static_cast<VkDisplayKHR>( display ),
11947  reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
11948  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11949  reinterpret_cast<VkFence *>( &fence ) );
11950  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
11951 
11952  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
11953  }
11954 
11955 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11956  template <typename Dispatch>
11959  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
11961  Dispatch const & d ) const
11962  {
11963  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11964 
11966  VkResult result = d.vkRegisterDisplayEventEXT(
11967  m_device,
11968  static_cast<VkDisplayKHR>( display ),
11969  reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
11970  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11971  reinterpret_cast<VkFence *>( &fence ) );
11972  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
11973 
11974  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
11976  }
11977 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
11978 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
11979 
11980  template <typename Dispatch>
11983  uint64_t * pCounterValue,
11984  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
11985  {
11986  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11987  return static_cast<Result>(
11988  d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
11989  }
11990 
11991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11992  template <typename Dispatch>
11994  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
11995  {
11996  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
11997 
11998  uint64_t counterValue;
11999  VkResult result =
12000  d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
12001  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
12002 
12003  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
12004  }
12005 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12006 
12007  //=== VK_GOOGLE_display_timing ===
12008 
12009  template <typename Dispatch>
12012  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
12013  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12014  {
12015  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12016  return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
12017  m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
12018  }
12019 
12020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12021  template <typename Dispatch>
12024  {
12025  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12026 
12027  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
12028  VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
12029  m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
12030  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
12031 
12032  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
12033  }
12034 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12035 
12036  template <typename Dispatch>
12039  uint32_t * pPresentationTimingCount,
12041  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12042  {
12043  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12044  return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
12045  static_cast<VkSwapchainKHR>( swapchain ),
12046  pPresentationTimingCount,
12047  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
12048  }
12049 
12050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12051  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
12055  {
12056  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12057 
12058  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
12059  uint32_t presentationTimingCount;
12060  VkResult result;
12061  do
12062  {
12063  result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
12064  if ( ( result == VK_SUCCESS ) && presentationTimingCount )
12065  {
12066  presentationTimings.resize( presentationTimingCount );
12067  result = d.vkGetPastPresentationTimingGOOGLE( m_device,
12068  static_cast<VkSwapchainKHR>( swapchain ),
12069  &presentationTimingCount,
12070  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
12071  }
12072  } while ( result == VK_INCOMPLETE );
12073  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
12074  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
12075  if ( presentationTimingCount < presentationTimings.size() )
12076  {
12077  presentationTimings.resize( presentationTimingCount );
12078  }
12079  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
12080  }
12081 
12082  template <typename PastPresentationTimingGOOGLEAllocator,
12083  typename Dispatch,
12084  typename B1,
12089  PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
12090  Dispatch const & d ) const
12091  {
12092  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12093 
12094  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
12095  pastPresentationTimingGOOGLEAllocator );
12096  uint32_t presentationTimingCount;
12097  VkResult result;
12098  do
12099  {
12100  result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
12101  if ( ( result == VK_SUCCESS ) && presentationTimingCount )
12102  {
12103  presentationTimings.resize( presentationTimingCount );
12104  result = d.vkGetPastPresentationTimingGOOGLE( m_device,
12105  static_cast<VkSwapchainKHR>( swapchain ),
12106  &presentationTimingCount,
12107  reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
12108  }
12109  } while ( result == VK_INCOMPLETE );
12110  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
12111  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
12112  if ( presentationTimingCount < presentationTimings.size() )
12113  {
12114  presentationTimings.resize( presentationTimingCount );
12115  }
12116  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
12117  }
12118 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12119 
12120  //=== VK_EXT_discard_rectangles ===
12121 
12122  template <typename Dispatch>
12123  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
12124  uint32_t discardRectangleCount,
12125  const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
12126  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12127  {
12128  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12129  d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
12130  }
12131 
12132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12133  template <typename Dispatch>
12134  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
12136  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12137  {
12138  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12139 
12140  d.vkCmdSetDiscardRectangleEXT(
12141  m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
12142  }
12143 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12144 
12145  //=== VK_EXT_hdr_metadata ===
12146 
12147  template <typename Dispatch>
12148  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
12149  const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
12150  const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
12151  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12152  {
12153  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12154  d.vkSetHdrMetadataEXT(
12155  m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
12156  }
12157 
12158 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12159  template <typename Dispatch>
12162  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
12163  {
12164  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12165 # ifdef VULKAN_HPP_NO_EXCEPTIONS
12166  VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
12167 # else
12168  if ( swapchains.size() != metadata.size() )
12169  {
12170  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
12171  }
12172 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
12173 
12174  d.vkSetHdrMetadataEXT( m_device,
12175  swapchains.size(),
12176  reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
12177  reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
12178  }
12179 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12180 
12181  //=== VK_KHR_create_renderpass2 ===
12182 
12183  template <typename Dispatch>
12185  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
12186  VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
12187  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12188  {
12189  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12190  return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
12191  reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
12192  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
12193  reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
12194  }
12195 
12196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12197  template <typename Dispatch>
12201  Dispatch const & d ) const
12202  {
12203  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12204 
12206  VkResult result =
12207  d.vkCreateRenderPass2KHR( m_device,
12208  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
12209  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12210  reinterpret_cast<VkRenderPass *>( &renderPass ) );
12211  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
12212 
12213  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
12214  }
12215 
12216 # ifndef VULKAN_HPP_NO_SMART_HANDLE
12217  template <typename Dispatch>
12221  Dispatch const & d ) const
12222  {
12223  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12224 
12226  VkResult result =
12227  d.vkCreateRenderPass2KHR( m_device,
12228  reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
12229  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12230  reinterpret_cast<VkRenderPass *>( &renderPass ) );
12231  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
12232 
12233  return createResultValueType(
12234  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12236  }
12237 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
12238 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12239 
12240  template <typename Dispatch>
12242  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
12243  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12244  {
12245  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12246  d.vkCmdBeginRenderPass2KHR(
12247  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
12248  }
12249 
12250 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12251  template <typename Dispatch>
12253  const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
12254  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12255  {
12256  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12257 
12258  d.vkCmdBeginRenderPass2KHR(
12259  m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
12260  }
12261 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12262 
12263  template <typename Dispatch>
12265  const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
12266  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12267  {
12268  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12269  d.vkCmdNextSubpass2KHR(
12270  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
12271  }
12272 
12273 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12274  template <typename Dispatch>
12276  const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
12277  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12278  {
12279  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12280 
12281  d.vkCmdNextSubpass2KHR(
12282  m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
12283  }
12284 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12285 
12286  template <typename Dispatch>
12288  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12289  {
12290  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12291  d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
12292  }
12293 
12294 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12295  template <typename Dispatch>
12297  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12298  {
12299  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12300 
12301  d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
12302  }
12303 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12304 
12305  //=== VK_KHR_shared_presentable_image ===
12306 
12307 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
12308  template <typename Dispatch>
12310  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12311  {
12312  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12313  return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
12314  }
12315 #else
12316  template <typename Dispatch>
12318  Dispatch const & d ) const
12319  {
12320  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12321 
12322  VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
12323  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12324  VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
12326 
12327  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
12328  }
12329 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
12330 
12331  //=== VK_KHR_external_fence_capabilities ===
12332 
12333  template <typename Dispatch>
12335  VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
12336  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12337  {
12338  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12339  d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
12340  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
12341  reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
12342  }
12343 
12344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12345  template <typename Dispatch>
12348  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12349  {
12350  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12351 
12352  VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
12353  d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
12354  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
12355  reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
12356 
12357  return externalFenceProperties;
12358  }
12359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12360 
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_fence_win32 ===

  // Import a fence payload from a Win32 handle (pointer variant, raw Result).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced mode: error codes are routed through resultCheck (throws when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Export a fence payload as a Win32 HANDLE (pointer variant).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                                                                HANDLE *                                                 pHandle,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced mode: returns the exported HANDLE by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE   handle;
    VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
12412 
12413  //=== VK_KHR_external_fence_fd ===
12414 
12415  template <typename Dispatch>
12417  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12418  {
12419  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12420  return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
12421  }
12422 
12423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12424  template <typename Dispatch>
12426  Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
12427  {
12428  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12429 
12430  VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
12431  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
12432 
12433  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
12434  }
12435 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12436 
12437  template <typename Dispatch>
12439  int * pFd,
12440  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12441  {
12442  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12443  return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
12444  }
12445 
12446 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12447  template <typename Dispatch>
12449  Dispatch const & d ) const
12450  {
12451  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12452 
12453  int fd;
12454  VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
12455  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
12456 
12457  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
12458  }
12459 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12460 
12461  //=== VK_KHR_performance_query ===
12462 
12463  template <typename Dispatch>
12466  uint32_t * pCounterCount,
12469  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12470  {
12471  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12472  return static_cast<Result>(
12473  d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
12474  queueFamilyIndex,
12475  pCounterCount,
12476  reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
12477  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
12478  }
12479 
12480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12481  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
12484  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
12485  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
12486  {
12487  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12488 
12489  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
12490  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
12491  data;
12492  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
12493  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
12494  uint32_t counterCount;
12495  VkResult result;
12496  do
12497  {
12498  result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
12499  if ( ( result == VK_SUCCESS ) && counterCount )
12500  {
12501  counters.resize( counterCount );
12502  counterDescriptions.resize( counterCount );
12503  result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
12504  m_physicalDevice,
12505  queueFamilyIndex,
12506  &counterCount,
12507  reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
12508  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
12509  }
12510  } while ( result == VK_INCOMPLETE );
12511  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12512  VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
12513  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
12514  if ( counterCount < counters.size() )
12515  {
12516  counters.resize( counterCount );
12517  counterDescriptions.resize( counterCount );
12518  }
12519  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
12520  }
12521 
12522  template <typename PerformanceCounterKHRAllocator,
12523  typename PerformanceCounterDescriptionKHRAllocator,
12524  typename Dispatch,
12525  typename B1,
12526  typename B2,
12529  int>::type>
12532  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
12534  PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
12535  PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
12536  Dispatch const & d ) const
12537  {
12538  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12539 
12540  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
12541  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
12542  data(
12543  std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
12544  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
12545  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
12546  uint32_t counterCount;
12547  VkResult result;
12548  do
12549  {
12550  result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
12551  if ( ( result == VK_SUCCESS ) && counterCount )
12552  {
12553  counters.resize( counterCount );
12554  counterDescriptions.resize( counterCount );
12555  result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
12556  m_physicalDevice,
12557  queueFamilyIndex,
12558  &counterCount,
12559  reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
12560  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
12561  }
12562  } while ( result == VK_INCOMPLETE );
12563  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12564  VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
12565  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
12566  if ( counterCount < counters.size() )
12567  {
12568  counters.resize( counterCount );
12569  counterDescriptions.resize( counterCount );
12570  }
12571  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
12572  }
12573 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12574 
12575  template <typename Dispatch>
12576  VULKAN_HPP_INLINE void
12578  uint32_t * pNumPasses,
12579  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12580  {
12581  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12582  d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
12583  m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
12584  }
12585 
12586 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12587  template <typename Dispatch>
12589  const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12590  {
12591  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12592 
12593  uint32_t numPasses;
12594  d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
12595  m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
12596 
12597  return numPasses;
12598  }
12599 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12600 
12601  template <typename Dispatch>
12603  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12604  {
12605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12606  return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
12607  }
12608 
12609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12610  template <typename Dispatch>
12613  {
12614  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12615 
12616  VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
12617  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
12618 
12619  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
12620  }
12621 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12622 
12623  template <typename Dispatch>
12625  {
12626  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12627  d.vkReleaseProfilingLockKHR( m_device );
12628  }
12629 
12630  //=== VK_KHR_get_surface_capabilities2 ===
12631 
12632  template <typename Dispatch>
12635  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
12636  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12637  {
12638  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12639  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12640  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
12641  reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
12642  }
12643 
12644 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12645  template <typename Dispatch>
12648  {
12649  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12650 
12652  VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12653  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12654  reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
12655  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
12656 
12657  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
12658  }
12659 
12660  template <typename X, typename Y, typename... Z, typename Dispatch>
12663  {
12664  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12665 
12666  StructureChain<X, Y, Z...> structureChain;
12667  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
12668  VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12669  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12670  reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
12671  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
12672 
12673  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
12674  }
12675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12676 
12677  template <typename Dispatch>
12679  uint32_t * pSurfaceFormatCount,
12680  VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
12681  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12682  {
12683  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12684  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12685  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
12686  pSurfaceFormatCount,
12687  reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
12688  }
12689 
12690 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12691  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
12694  {
12695  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12696 
12697  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
12698  uint32_t surfaceFormatCount;
12699  VkResult result;
12700  do
12701  {
12702  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12703  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
12704  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
12705  {
12706  surfaceFormats.resize( surfaceFormatCount );
12707  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12708  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12709  &surfaceFormatCount,
12710  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12711  }
12712  } while ( result == VK_INCOMPLETE );
12713  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
12714  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
12715  if ( surfaceFormatCount < surfaceFormats.size() )
12716  {
12717  surfaceFormats.resize( surfaceFormatCount );
12718  }
12719  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
12720  }
12721 
12722  template <typename SurfaceFormat2KHRAllocator,
12723  typename Dispatch,
12724  typename B1,
12728  SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
12729  Dispatch const & d ) const
12730  {
12731  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12732 
12733  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
12734  uint32_t surfaceFormatCount;
12735  VkResult result;
12736  do
12737  {
12738  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12739  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
12740  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
12741  {
12742  surfaceFormats.resize( surfaceFormatCount );
12743  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12744  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12745  &surfaceFormatCount,
12746  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12747  }
12748  } while ( result == VK_INCOMPLETE );
12749  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
12750  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
12751  if ( surfaceFormatCount < surfaceFormats.size() )
12752  {
12753  surfaceFormats.resize( surfaceFormatCount );
12754  }
12755  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
12756  }
12757 
12758  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
12761  {
12762  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12763 
12764  std::vector<StructureChain, StructureChainAllocator> structureChains;
12765  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
12766  uint32_t surfaceFormatCount;
12767  VkResult result;
12768  do
12769  {
12770  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12771  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
12772  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
12773  {
12774  structureChains.resize( surfaceFormatCount );
12775  surfaceFormats.resize( surfaceFormatCount );
12776  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12777  {
12778  surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
12779  }
12780  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12781  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12782  &surfaceFormatCount,
12783  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12784  }
12785  } while ( result == VK_INCOMPLETE );
12786  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
12787  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
12788  if ( surfaceFormatCount < surfaceFormats.size() )
12789  {
12790  structureChains.resize( surfaceFormatCount );
12791  }
12792  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12793  {
12794  structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
12795  }
12796  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
12797  }
12798 
12799  template <typename StructureChain,
12800  typename StructureChainAllocator,
12801  typename Dispatch,
12802  typename B1,
12806  StructureChainAllocator & structureChainAllocator,
12807  Dispatch const & d ) const
12808  {
12809  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12810 
12811  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
12812  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
12813  uint32_t surfaceFormatCount;
12814  VkResult result;
12815  do
12816  {
12817  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12818  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
12819  if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
12820  {
12821  structureChains.resize( surfaceFormatCount );
12822  surfaceFormats.resize( surfaceFormatCount );
12823  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12824  {
12825  surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
12826  }
12827  result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12828  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12829  &surfaceFormatCount,
12830  reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12831  }
12832  } while ( result == VK_INCOMPLETE );
12833  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
12834  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
12835  if ( surfaceFormatCount < surfaceFormats.size() )
12836  {
12837  structureChains.resize( surfaceFormatCount );
12838  }
12839  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12840  {
12841  structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
12842  }
12843  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
12844  }
12845 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12846 
12847  //=== VK_KHR_get_display_properties2 ===
12848 
12849  template <typename Dispatch>
12852  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12853  {
12854  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12855  return static_cast<Result>(
12856  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
12857  }
12858 
12859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12860  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
12863  PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
12864  {
12865  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12866 
12867  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
12868  uint32_t propertyCount;
12869  VkResult result;
12870  do
12871  {
12872  result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
12873  if ( ( result == VK_SUCCESS ) && propertyCount )
12874  {
12875  properties.resize( propertyCount );
12876  result =
12877  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
12878  }
12879  } while ( result == VK_INCOMPLETE );
12880  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
12881  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
12882  if ( propertyCount < properties.size() )
12883  {
12884  properties.resize( propertyCount );
12885  }
12886  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
12887  }
12888 
12889  template <typename DisplayProperties2KHRAllocator,
12890  typename Dispatch,
12891  typename B1,
12895  PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
12896  {
12897  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12898 
12899  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
12900  uint32_t propertyCount;
12901  VkResult result;
12902  do
12903  {
12904  result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
12905  if ( ( result == VK_SUCCESS ) && propertyCount )
12906  {
12907  properties.resize( propertyCount );
12908  result =
12909  d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
12910  }
12911  } while ( result == VK_INCOMPLETE );
12912  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
12913  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
12914  if ( propertyCount < properties.size() )
12915  {
12916  properties.resize( propertyCount );
12917  }
12918  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
12919  }
12920 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12921 
12922  template <typename Dispatch>
12925  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
12926  {
12927  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12928  return static_cast<Result>(
12929  d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
12930  }
12931 
12932 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12933  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
12937  {
12938  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12939 
12940  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
12941  uint32_t propertyCount;
12942  VkResult result;
12943  do
12944  {
12945  result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
12946  if ( ( result == VK_SUCCESS ) && propertyCount )
12947  {
12948  properties.resize( propertyCount );
12949  result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
12950  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
12951  }
12952  } while ( result == VK_INCOMPLETE );
12953  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
12954  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
12955  if ( propertyCount < properties.size() )
12956  {
12957  properties.resize( propertyCount );
12958  }
12959  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
12960  }
12961 
// Allocator-aware overload of PhysicalDevice::getDisplayPlaneProperties2KHR: same
// two-call enumeration as the overload above, but the result vector is constructed
// with a caller-supplied allocator.
// NOTE(review): doc lines 12965-12967 (template constraint / return-type lines of
// the declaration) were lost in extraction — verify against the generated header.
12962  template <typename DisplayPlaneProperties2KHRAllocator,
12963  typename Dispatch,
12964  typename B1,
12968  PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
12969  {
12970  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
12971 
12972  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
12973  uint32_t propertyCount;
12974  VkResult result;
// Standard Vulkan enumeration loop: query count, resize, fetch; retry while the
// implementation reports VK_INCOMPLETE (the count may change between the calls).
12975  do
12976  {
12977  result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
12978  if ( ( result == VK_SUCCESS ) && propertyCount )
12979  {
12980  properties.resize( propertyCount );
12981  result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
12982  m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
12983  }
12984  } while ( result == VK_INCOMPLETE );
12985  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
// Shrink in case the final fetch returned fewer elements than were reserved.
12986  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
12987  if ( propertyCount < properties.size() )
12988  {
12989  properties.resize( propertyCount );
12990  }
12991  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
12992  }
12993 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
12994 
// C-style pointer overload: forwards directly to vkGetDisplayModeProperties2KHR for
// the given display; caller owns the count/properties output buffers.
// NOTE(review): doc lines 12996/12998 (declaration name and parameter lines) were
// lost in extraction — verify against the generated header.
12995  template <typename Dispatch>
12997  uint32_t * pPropertyCount,
12999  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13000  {
13001  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13002  return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
13003  m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
13004  }
13005 
13006 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: enumerates all DisplayModeProperties2KHR for a display
// into a std::vector using the two-call idiom, retrying on VK_INCOMPLETE.
// NOTE(review): doc lines 13008-13010 (return type / declaration lines) were lost
// in extraction — verify against the generated header.
13007  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
13011  {
13012  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13013 
13014  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
13015  uint32_t propertyCount;
13016  VkResult result;
13017  do
13018  {
13019  result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
13020  if ( ( result == VK_SUCCESS ) && propertyCount )
13021  {
13022  properties.resize( propertyCount );
13023  result = d.vkGetDisplayModeProperties2KHR(
13024  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
13025  }
13026  } while ( result == VK_INCOMPLETE );
13027  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
// Trim any over-allocation from the final count.
13028  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
13029  if ( propertyCount < properties.size() )
13030  {
13031  properties.resize( propertyCount );
13032  }
13033  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
13034  }
13035 
// Allocator-aware overload of the enumeration above; identical logic, but the
// result vector is constructed with the caller-supplied allocator.
// NOTE(review): doc lines 13039-13042 (constraint / declaration lines) were lost
// in extraction — verify against the generated header.
13036  template <typename DisplayModeProperties2KHRAllocator,
13037  typename Dispatch,
13038  typename B1,
13043  DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
13044  Dispatch const & d ) const
13045  {
13046  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13047 
13048  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
13049  uint32_t propertyCount;
13050  VkResult result;
// Two-call enumeration with retry on VK_INCOMPLETE.
13051  do
13052  {
13053  result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
13054  if ( ( result == VK_SUCCESS ) && propertyCount )
13055  {
13056  properties.resize( propertyCount );
13057  result = d.vkGetDisplayModeProperties2KHR(
13058  m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
13059  }
13060  } while ( result == VK_INCOMPLETE );
13061  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
13062  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
13063  if ( propertyCount < properties.size() )
13064  {
13065  properties.resize( propertyCount );
13066  }
13067  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
13068  }
13069 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13070 
// C-style pointer overload: thin forwarding wrapper over
// vkGetDisplayPlaneCapabilities2KHR; output written through pCapabilities.
// NOTE(review): doc lines 13072-13074 (declaration lines) were lost in
// extraction — verify against the generated header.
13071  template <typename Dispatch>
13075  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13076  {
13077  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13078  return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
13079  reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
13080  reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
13081  }
13082 
13083 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the capabilities by value; resultCheck throws
// (or asserts, depending on configuration) on a failure result.
// NOTE(review): doc lines 13085-13086 and 13090 (declaration and local-variable
// lines) were lost in extraction — verify against the generated header.
13084  template <typename Dispatch>
13087  {
13088  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13089 
13091  VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
13092  reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
13093  reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
13094  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
13095 
13096  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
13097  }
13098 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13099 
13100 #if defined( VK_USE_PLATFORM_IOS_MVK )
13101  //=== VK_MVK_ios_surface ===
13102 
// VK_MVK_ios_surface: three variants of Instance::createIOSSurfaceMVK.
// 1) C-style pointer overload — direct forward to vkCreateIOSSurfaceMVK.
13103  template <typename Dispatch>
13104  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
13105  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13107  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13108  {
13109  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13110  return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
13111  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
13112  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13113  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
13114  }
13115 
13116 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// 2) Enhanced overload — returns the created SurfaceKHR by value after resultCheck.
// NOTE(review): doc lines 13106, 13118 and 13125 (declaration / local-variable
// lines) were lost in extraction — verify against the generated header.
13117  template <typename Dispatch>
13119  Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
13120  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13121  Dispatch const & d ) const
13122  {
13123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13124 
13126  VkResult result =
13127  d.vkCreateIOSSurfaceMVK( m_instance,
13128  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
13129  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13130  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13131  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
13132 
13133  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
13134  }
13135 
13136 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// 3) Unique-handle overload — wraps the surface in a UniqueHandle whose deleter
// destroys it via this Instance with the same allocator and dispatcher.
13137  template <typename Dispatch>
13138  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
13139  Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
13140  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13141  Dispatch const & d ) const
13142  {
13143  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13144 
13146  VkResult result =
13147  d.vkCreateIOSSurfaceMVK( m_instance,
13148  reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
13149  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13150  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13151  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
13152 
13153  return createResultValueType(
13154  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13155  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
13156  }
13157 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13158 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13159 #endif /*VK_USE_PLATFORM_IOS_MVK*/
13160 
13161 #if defined( VK_USE_PLATFORM_MACOS_MVK )
13162  //=== VK_MVK_macos_surface ===
13163 
// VK_MVK_macos_surface: three variants of Instance::createMacOSSurfaceMVK,
// structurally identical to the iOS-surface wrappers.
// 1) C-style pointer overload — direct forward to vkCreateMacOSSurfaceMVK.
13164  template <typename Dispatch>
13165  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
13166  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13168  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13169  {
13170  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13171  return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
13172  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
13173  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13174  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
13175  }
13176 
13177 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// 2) Enhanced overload — returns the created SurfaceKHR by value after resultCheck.
// NOTE(review): doc lines 13167, 13179 and 13186 (declaration / local-variable
// lines) were lost in extraction — verify against the generated header.
13178  template <typename Dispatch>
13180  Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
13181  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13182  Dispatch const & d ) const
13183  {
13184  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13185 
13187  VkResult result =
13188  d.vkCreateMacOSSurfaceMVK( m_instance,
13189  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
13190  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13191  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13192  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
13193 
13194  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
13195  }
13196 
13197 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// 3) Unique-handle overload — surface owned by a UniqueHandle; the deleter destroys
// it through this Instance with the same allocator and dispatcher.
13198  template <typename Dispatch>
13199  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
13200  Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
13201  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13202  Dispatch const & d ) const
13203  {
13204  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13205 
13207  VkResult result =
13208  d.vkCreateMacOSSurfaceMVK( m_instance,
13209  reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
13210  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13211  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13212  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
13213 
13214  return createResultValueType(
13215  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13216  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
13217  }
13218 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13219 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13220 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
13221 
13222  //=== VK_EXT_debug_utils ===
13223 
// VK_EXT_debug_utils: Device::setDebugUtilsObjectNameEXT / setDebugUtilsObjectTagEXT,
// each in a pointer variant and an enhanced (reference + resultCheck) variant.
// NOTE(review): several declaration lines (doc 13225, 13234-13235, 13247, 13256-13257)
// were lost in extraction — verify against the generated header.
13224  template <typename Dispatch>
13226  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13227  {
13228  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13229  return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
13230  }
13231 
13232 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant: takes the name info by reference; throws/asserts via resultCheck.
13233  template <typename Dispatch>
13236  {
13237  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13238 
13239  VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
13240  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
13241 
13242  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
13243  }
13244 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13245 
// Pointer variant of setDebugUtilsObjectTagEXT: direct forward, no result translation.
13246  template <typename Dispatch>
13248  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13249  {
13250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13251  return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
13252  }
13253 
13254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant of setDebugUtilsObjectTagEXT.
13255  template <typename Dispatch>
13258  {
13259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13260 
13261  VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
13262  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
13263 
13264  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
13265  }
13266 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13267 
// VK_EXT_debug_utils label commands: begin/end/insert wrappers for both Queue and
// CommandBuffer. All are void forwarding calls; each comes in a pointer variant and
// (where applicable) an enhanced reference variant.
// NOTE(review): the stripped declaration lines (doc 13269, 13278, 13288, 13295,
// 13304, 13314, 13323, 13333, 13340, 13349) were lost in extraction — verify
// against the generated header.
13268  template <typename Dispatch>
13270  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13271  {
13272  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13273  d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13274  }
13275 
13276 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13277  template <typename Dispatch>
13279  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13280  {
13281  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13282 
13283  d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13284  }
13285 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13286 
// Ends the most recent queue debug label region; takes no label argument.
13287  template <typename Dispatch>
13289  {
13290  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13291  d.vkQueueEndDebugUtilsLabelEXT( m_queue );
13292  }
13293 
13294  template <typename Dispatch>
13296  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13297  {
13298  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13299  d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13300  }
13301 
13302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13303  template <typename Dispatch>
13305  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13306  {
13307  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13308 
13309  d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13310  }
13311 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13312 
// CommandBuffer counterparts of the label commands above.
13313  template <typename Dispatch>
13315  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13316  {
13317  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13318  d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13319  }
13320 
13321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13322  template <typename Dispatch>
13324  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13325  {
13326  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13327 
13328  d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13329  }
13330 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13331 
13332  template <typename Dispatch>
13334  {
13335  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13336  d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
13337  }
13338 
13339  template <typename Dispatch>
13341  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13342  {
13343  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13344  d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13345  }
13346 
13347 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13348  template <typename Dispatch>
13350  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13351  {
13352  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13353 
13354  d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13355  }
13356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13357 
// Instance::createDebugUtilsMessengerEXT in three variants: pointer, enhanced
// (returns the messenger by value), and unique-handle.
// NOTE(review): doc lines 13359-13362, 13374-13376, 13381, 13394-13396, 13401 and
// 13411 (declaration / local-variable / UniqueHandle-construction lines) were lost
// in extraction — verify against the generated header.
13358  template <typename Dispatch>
13361  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13363  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13364  {
13365  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13366  return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
13367  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
13368  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13369  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
13370  }
13371 
13372 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant: optional allocator, resultCheck on failure, messenger by value.
13373  template <typename Dispatch>
13377  Dispatch const & d ) const
13378  {
13379  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13380 
13382  VkResult result = d.vkCreateDebugUtilsMessengerEXT(
13383  m_instance,
13384  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
13385  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13386  reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
13387  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
13388 
13389  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
13390  }
13391 
13392 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: same call, result wrapped for RAII destruction.
13393  template <typename Dispatch>
13397  Dispatch const & d ) const
13398  {
13399  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13400 
13402  VkResult result = d.vkCreateDebugUtilsMessengerEXT(
13403  m_instance,
13404  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
13405  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13406  reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
13407  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
13408 
13409  return createResultValueType(
13410  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13412  }
13413 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13414 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13415 
// Instance::destroyDebugUtilsMessengerEXT (named) and the generic Instance::destroy
// overload for DebugUtilsMessengerEXT handles — each with a pointer-allocator and an
// Optional-allocator (enhanced) variant; all forward to vkDestroyDebugUtilsMessengerEXT.
// NOTE(review): the stripped declaration lines (doc 13417, 13428-13429, 13442,
// 13453-13454) were lost in extraction — verify against the generated header.
13416  template <typename Dispatch>
13418  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13419  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13420  {
13421  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13422  d.vkDestroyDebugUtilsMessengerEXT(
13423  m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13424  }
13425 
13426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13427  template <typename Dispatch>
13430  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13431  {
13432  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13433 
13434  d.vkDestroyDebugUtilsMessengerEXT(
13435  m_instance,
13436  static_cast<VkDebugUtilsMessengerEXT>( messenger ),
13437  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
13438  }
13439 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13440 
// Generic destroy() overload — identical body to the named variant above.
13441  template <typename Dispatch>
13443  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13444  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13445  {
13446  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13447  d.vkDestroyDebugUtilsMessengerEXT(
13448  m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13449  }
13450 
13451 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13452  template <typename Dispatch>
13455  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13456  {
13457  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13458 
13459  d.vkDestroyDebugUtilsMessengerEXT(
13460  m_instance,
13461  static_cast<VkDebugUtilsMessengerEXT>( messenger ),
13462  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
13463  }
13464 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13465 
// Instance::submitDebugUtilsMessageEXT: injects a message into the debug-utils
// message stream; pointer and enhanced (reference) variants, both void.
// NOTE(review): doc lines 13467-13469 and 13481-13483 (declaration lines with the
// severity/type/callback-data parameters) were lost in extraction — verify against
// the generated header.
13466  template <typename Dispatch>
13470  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13471  {
13472  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13473  d.vkSubmitDebugUtilsMessageEXT( m_instance,
13474  static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
13475  static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
13476  reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
13477  }
13478 
13479 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13480  template <typename Dispatch>
13484  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13485  {
13486  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13487 
13488  d.vkSubmitDebugUtilsMessageEXT( m_instance,
13489  static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
13490  static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
13491  reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
13492  }
13493 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13494 
13495 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
13496  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
13497 
// VK_ANDROID_external_memory_android_hardware_buffer wrappers:
// getAndroidHardwareBufferPropertiesANDROID (pointer, value, StructureChain) and
// getMemoryAndroidHardwareBufferANDROID (pointer, value).
// NOTE(review): doc lines 13499, 13511, 13542 and 13554 (return-type lines of the
// declarations) were lost in extraction — verify against the generated header.
13498  template <typename Dispatch>
13500  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer,
13501  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
13502  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13503  {
13504  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13505  return static_cast<Result>(
13506  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
13507  }
13508 
13509 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant: returns the properties struct by value after resultCheck.
13510  template <typename Dispatch>
13512  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
13513  {
13514  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13515 
13516  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
13517  VkResult result =
13518  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
13519  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
13520 
13521  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
13522  }
13523 
// StructureChain variant: fills the AndroidHardwareBufferPropertiesANDROID element
// of a caller-specified pNext chain, so extension structs can be queried too.
13524  template <typename X, typename Y, typename... Z, typename Dispatch>
13525  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
13526  Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
13527  {
13528  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13529 
13530  StructureChain<X, Y, Z...> structureChain;
13531  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
13532  structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
13533  VkResult result =
13534  d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
13535  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
13536 
13537  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
13538  }
13539 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13540 
// Pointer variant: exports device memory as an AHardwareBuffer via the C entry point.
13541  template <typename Dispatch>
13543  Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
13544  struct AHardwareBuffer ** pBuffer,
13545  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13546  {
13547  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13548  return static_cast<Result>(
13549  d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
13550  }
13551 
13552 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant: returns the AHardwareBuffer pointer after resultCheck.
13553  template <typename Dispatch>
13555  Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
13556  {
13557  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13558 
13559  struct AHardwareBuffer * buffer;
13560  VkResult result =
13561  d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
13562  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
13563 
13564  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
13565  }
13566 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13567 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
13568 
13569  //=== VK_EXT_sample_locations ===
13570 
// VK_EXT_sample_locations: CommandBuffer::setSampleLocationsEXT — pointer and
// enhanced (reference) variants, both void forwards to vkCmdSetSampleLocationsEXT.
// NOTE(review): doc lines 13572 and 13581 (declaration lines) were lost in
// extraction — verify against the generated header.
13571  template <typename Dispatch>
13573  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13574  {
13575  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13576  d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
13577  }
13578 
13579 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13580  template <typename Dispatch>
13582  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13583  {
13584  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13585 
13586  d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
13587  }
13588 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13589 
// PhysicalDevice::getMultisamplePropertiesEXT: pointer variant writes through the
// out-parameter; enhanced variant returns MultisamplePropertiesEXT by value. The C
// entry point returns void, so there is no result translation.
// NOTE(review): doc lines 13591 and 13602-13603 (declaration lines) were lost in
// extraction — verify against the generated header.
13590  template <typename Dispatch>
13592  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
13593  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13594  {
13595  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13596  d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
13597  m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
13598  }
13599 
13600 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13601  template <typename Dispatch>
13604  {
13605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13606 
13607  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
13608  d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
13609  m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
13610 
13611  return multisampleProperties;
13612  }
13613 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13614 
13615  //=== VK_KHR_get_memory_requirements2 ===
13616 
// VK_KHR_get_memory_requirements2: Device::getImageMemoryRequirements2KHR in three
// variants — pointer, value-returning, and StructureChain (pNext-aware).
// NOTE(review): doc lines 13618, 13629-13630 and 13642-13643 (declaration lines)
// were lost in extraction — verify against the generated header.
13617  template <typename Dispatch>
13619  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
13620  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13621  {
13622  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13623  d.vkGetImageMemoryRequirements2KHR(
13624  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
13625  }
13626 
13627 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Value-returning variant.
13628  template <typename Dispatch>
13631  {
13632  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13633 
13634  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
13635  d.vkGetImageMemoryRequirements2KHR(
13636  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13637 
13638  return memoryRequirements;
13639  }
13640 
// StructureChain variant: fills the MemoryRequirements2 element of the chain so
// extension structures linked via pNext are populated as well.
13641  template <typename X, typename Y, typename... Z, typename Dispatch>
13644  {
13645  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13646 
13647  StructureChain<X, Y, Z...> structureChain;
13648  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
13649  d.vkGetImageMemoryRequirements2KHR(
13650  m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13651 
13652  return structureChain;
13653  }
13654 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13655 
// Device::getBufferMemoryRequirements2KHR — pointer, value-returning, and
// StructureChain variants, mirroring the image-requirements wrappers above.
// NOTE(review): doc lines 13657, 13668-13669 and 13681-13682 (declaration lines)
// were lost in extraction — verify against the generated header.
13656  template <typename Dispatch>
13658  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
13659  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13660  {
13661  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13662  d.vkGetBufferMemoryRequirements2KHR(
13663  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
13664  }
13665 
13666 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Value-returning variant.
13667  template <typename Dispatch>
13670  {
13671  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13672 
13673  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
13674  d.vkGetBufferMemoryRequirements2KHR(
13675  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13676 
13677  return memoryRequirements;
13678  }
13679 
// StructureChain variant: populates the MemoryRequirements2 element of the chain.
13680  template <typename X, typename Y, typename... Z, typename Dispatch>
13683  {
13684  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13685 
13686  StructureChain<X, Y, Z...> structureChain;
13687  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
13688  d.vkGetBufferMemoryRequirements2KHR(
13689  m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13690 
13691  return structureChain;
13692  }
13693 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13694 
// Pointer variant of Device::getImageSparseMemoryRequirements2KHR: forwards the
// count/array out-parameters straight to the C entry point (void return).
// NOTE(review): doc line 13696 (declaration line) was lost in extraction — verify
// against the generated header.
13695  template <typename Dispatch>
13697  uint32_t * pSparseMemoryRequirementCount,
13698  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
13699  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13700  {
13701  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13702  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13703  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
13704  pSparseMemoryRequirementCount,
13705  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
13706  }
13707 
13708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced variant: two-call count/fetch into a std::vector. No retry loop here —
// the C entry point returns void, so a single count+fetch pass suffices.
// NOTE(review): doc line 13711 (declaration line) was lost in extraction — verify
// against the generated header.
13709  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
13710  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
13712  {
13713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13714 
13715  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
13716  uint32_t sparseMemoryRequirementCount;
13717  d.vkGetImageSparseMemoryRequirements2KHR(
13718  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
13719  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13720  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13721  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
13722  &sparseMemoryRequirementCount,
13723  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
13724 
// Trim any over-allocation from the final count.
13725  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
13726  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
13727  {
13728  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13729  }
13730  return sparseMemoryRequirements;
13731  }
13732 
// Same two-call enumerate pattern as above, but constructs the result vector with a
// caller-supplied allocator instance. NOTE(review): lines 13736/13738 (constraint + name) elided.
13733  template <typename SparseImageMemoryRequirements2Allocator,
13734  typename Dispatch,
13735  typename B1,
13737  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
13739  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
13740  Dispatch const & d ) const
13741  {
13742  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13743 
13744  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
13745  sparseImageMemoryRequirements2Allocator );
13746  uint32_t sparseMemoryRequirementCount;
13747  d.vkGetImageSparseMemoryRequirements2KHR(
13748  m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
13749  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13750  d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13751  reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
13752  &sparseMemoryRequirementCount,
13753  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
13754 
13755  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
13756  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
13757  {
13758  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13759  }
13760  return sparseMemoryRequirements;
13761  }
13762 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13763 
13764  //=== VK_KHR_acceleration_structure ===
13765 
// Raw-pointer overload: forwards to vkCreateAccelerationStructureKHR and returns the VkResult
// cast to vk::Result; no exceptions here. NOTE(review): declaration lines 13767-13768 elided.
13766  template <typename Dispatch>
13769  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13770  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
13771  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13772  {
13773  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13774  return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
13775  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
13776  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13777  reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
13778  }
13779 
13780 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: creates the handle, runs resultCheck (throws unless VULKAN_HPP_NO_EXCEPTIONS),
// and returns the handle wrapped by createResultValueType. NOTE(review): signature lines elided.
13781  template <typename Dispatch>
13785  Dispatch const & d ) const
13786  {
13787  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13788 
13789  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
13790  VkResult result = d.vkCreateAccelerationStructureKHR(
13791  m_device,
13792  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
13793  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13794  reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
13795  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
13796 
13797  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
13798  }
13799 
13800 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: same creation path, but the return expression wrapping the handle into a
// UniqueHandle (line 13819) is elided in this listing — presumably a UniqueHandle with an
// ObjectDestroy deleter, as in other *Unique wrappers; confirm against the real header.
13801  template <typename Dispatch>
13805  Dispatch const & d ) const
13806  {
13807  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13808 
13809  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
13810  VkResult result = d.vkCreateAccelerationStructureKHR(
13811  m_device,
13812  reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
13813  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13814  reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
13815  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
13816 
13817  return createResultValueType(
13818  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13820  }
13821 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
13822 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13823 
// Raw-pointer overload: forwards directly to vkDestroyAccelerationStructureKHR.
// NOTE(review): declaration line 13825 elided in this listing.
13824  template <typename Dispatch>
13826  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13827  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13828  {
13829  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13830  d.vkDestroyAccelerationStructureKHR(
13831  m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13832  }
13833 
13834 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: Optional<AllocationCallbacks> converted to a raw VkAllocationCallbacks*.
// NOTE(review): declaration lines 13836-13837 elided in this listing.
13835  template <typename Dispatch>
13838  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13839  {
13840  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13841 
13842  d.vkDestroyAccelerationStructureKHR(
13843  m_device,
13844  static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
13845  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
13846  }
13847 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13848 
// Generic destroy() overload for AccelerationStructureKHR — same body as the named destroy wrapper.
// NOTE(review): declaration line 13850 elided in this listing.
13849  template <typename Dispatch>
13851  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
13852  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13853  {
13854  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13855  d.vkDestroyAccelerationStructureKHR(
13856  m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13857  }
13858 
13859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced generic destroy() overload with Optional allocator callbacks.
// NOTE(review): declaration lines 13861-13862 elided in this listing.
13860  template <typename Dispatch>
13863  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13864  {
13865  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13866 
13867  d.vkDestroyAccelerationStructureKHR(
13868  m_device,
13869  static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
13870  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
13871  }
13872 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13873 
// Raw-pointer overload: records vkCmdBuildAccelerationStructuresKHR into this command buffer;
// caller guarantees infoCount matches both arrays. NOTE(review): lines 13876-13877 elided.
13874  template <typename Dispatch>
13875  VULKAN_HPP_INLINE void
13878  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
13879  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13880  {
13881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13882  d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
13883  infoCount,
13884  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13885  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
13886  }
13887 
13888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload: validates infos.size() == pBuildRangeInfos.size() — assert when exceptions
// are disabled, LogicError otherwise — before recording the build command.
// NOTE(review): declaration lines 13890-13892 elided in this listing.
13889  template <typename Dispatch>
13893  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
13894  {
13895  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13896 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13897  VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
13898 # else
13899  if ( infos.size() != pBuildRangeInfos.size() )
13900  {
13901  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
13902  }
13903 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
13904 
13905  d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
13906  infos.size(),
13907  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
13908  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
13909  }
13910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13911 
// Raw-pointer overload: records the indirect build command; per-info device addresses, strides and
// max-primitive-count arrays are passed through untouched. NOTE(review): lines 13913-13914 elided.
13912  template <typename Dispatch>
13915  const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
13916  const uint32_t * pIndirectStrides,
13917  const uint32_t * const * ppMaxPrimitiveCounts,
13918  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13919  {
13920  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13921  d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
13922  infoCount,
13923  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13924  reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
13925  pIndirectStrides,
13926  ppMaxPrimitiveCounts );
13927  }
13928 
13929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload: checks that all four proxies have matching sizes (asserts when exceptions
// are disabled, LogicError otherwise) before recording the indirect build.
// NOTE(review): declaration lines 13931-13933 and 13935 elided in this listing.
13930  template <typename Dispatch>
13934  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
13936  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
13937  {
13938  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13939 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13940  VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
13941  VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
13942  VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
13943 # else
13944  if ( infos.size() != indirectDeviceAddresses.size() )
13945  {
13946  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
13947  }
13948  if ( infos.size() != indirectStrides.size() )
13949  {
13950  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
13951  }
13952  if ( infos.size() != pMaxPrimitiveCounts.size() )
13953  {
13954  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
13955  }
13956 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
13957 
13958  d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
13959  infos.size(),
13960  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
13961  reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
13962  indirectStrides.data(),
13963  pMaxPrimitiveCounts.data() );
13964  }
13965 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
13966 
// Raw-pointer overload: host-side build via vkBuildAccelerationStructuresKHR, optionally deferred
// through a DeferredOperationKHR handle. NOTE(review): lines 13968-13969/13971 elided.
13967  template <typename Dispatch>
13970  uint32_t infoCount,
13972  const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
13973  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
13974  {
13975  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13976  return static_cast<Result>(
13977  d.vkBuildAccelerationStructuresKHR( m_device,
13978  static_cast<VkDeferredOperationKHR>( deferredOperation ),
13979  infoCount,
13980  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13981  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
13982  }
13983 
13984 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload of the host-side build: size check, then resultCheck with an allowed-results
// list (the list itself, line 14011, is elided here — presumably the deferred/not-deferred success
// codes; confirm against the real header). Returns the raw Result so callers can inspect it.
13985  template <typename Dispatch>
13990  Dispatch const & d ) const
13991  {
13992  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
13993 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13994  VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
13995 # else
13996  if ( infos.size() != pBuildRangeInfos.size() )
13997  {
13998  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
13999  }
14000 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
14001 
14002  VkResult result =
14003  d.vkBuildAccelerationStructuresKHR( m_device,
14004  static_cast<VkDeferredOperationKHR>( deferredOperation ),
14005  infos.size(),
14006  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
14007  reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
14008  resultCheck(
14009  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14010  VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
14012 
14013  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
14014  }
14015 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14016 
// Raw-pointer overload: forwards to vkCopyAccelerationStructureKHR (optionally deferred).
// NOTE(review): declaration lines 14018-14019 elided in this listing.
14017  template <typename Dispatch>
14020  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14021  {
14022  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14023  return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
14024  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
14025  }
14026 
14027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: resultCheck against an allowed-results list (line 14041 elided) and returns
// the raw Result, since deferred operations report success via several result codes.
14028  template <typename Dispatch>
14032  Dispatch const & d ) const
14033  {
14034  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14035 
14036  VkResult result = d.vkCopyAccelerationStructureKHR(
14037  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
14038  resultCheck(
14039  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14040  VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
14042 
14043  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
14044  }
14045 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14046 
// Raw-pointer overload: serialize an acceleration structure to memory (optionally deferred).
// NOTE(review): declaration lines 14048-14050 elided in this listing.
14047  template <typename Dispatch>
14051  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14052  {
14053  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14054  return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
14055  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
14056  }
14057 
14058 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same deferred-operation result handling as copyAccelerationStructureKHR
// (allowed-results list at line 14072 elided in this listing).
14059  template <typename Dispatch>
14063  Dispatch const & d ) const
14064  {
14065  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14066 
14067  VkResult result = d.vkCopyAccelerationStructureToMemoryKHR(
14068  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
14069  resultCheck(
14070  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14071  VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
14073 
14074  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
14075  }
14076 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14077 
// Raw-pointer overload: deserialize from memory into an acceleration structure (optionally deferred).
// NOTE(review): declaration lines 14079-14081 elided in this listing.
14078  template <typename Dispatch>
14082  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14083  {
14084  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14085  return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
14086  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
14087  }
14088 
14089 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: deferred-operation-aware resultCheck (allowed-results list at line 14103
// elided in this listing) and raw Result return.
14090  template <typename Dispatch>
14094  Dispatch const & d ) const
14095  {
14096  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14097 
14098  VkResult result = d.vkCopyMemoryToAccelerationStructureKHR(
14099  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
14100  resultCheck(
14101  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14102  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
14104 
14105  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
14106  }
14107 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14108 
// Raw-pointer overload: writes queried properties of the given acceleration structures into a
// caller-owned buffer. NOTE(review): lines 14110/14113 elided in this listing.
14109  template <typename Dispatch>
14111  Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
14112  const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
14114  size_t dataSize,
14115  void * pData,
14116  size_t stride,
14117  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14118  {
14119  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14120  return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14121  accelerationStructureCount,
14122  reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
14123  static_cast<VkQueryType>( queryType ),
14124  dataSize,
14125  pData,
14126  stride ) );
14127  }
14128 
14129 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload returning a vector<DataType>: asserts dataSize is a whole number of DataType
// elements, fills the vector in one call, then resultCheck + createResultValueType.
// NOTE(review): declaration lines 14131-14134 elided in this listing.
14130  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
14135  size_t dataSize,
14136  size_t stride,
14137  Dispatch const & d ) const
14138  {
14139  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14140 
14141  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
14142  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
14143  VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14144  accelerationStructures.size(),
14145  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
14146  static_cast<VkQueryType>( queryType ),
14147  data.size() * sizeof( DataType ),
14148  reinterpret_cast<void *>( data.data() ),
14149  stride );
14150  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
14151 
14152  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
14153  }
14154 
// Single-value variant: retrieves exactly one DataType (dataSize == sizeof(DataType)).
// NOTE(review): declaration lines 14156-14158 elided in this listing.
14155  template <typename DataType, typename Dispatch>
14159  size_t stride,
14160  Dispatch const & d ) const
14161  {
14162  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14163 
14164  DataType data;
14165  VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14166  accelerationStructures.size(),
14167  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
14168  static_cast<VkQueryType>( queryType ),
14169  sizeof( DataType ),
14170  reinterpret_cast<void *>( &data ),
14171  stride );
14172  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
14173 
14174  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
14175  }
14176 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14177 
// Raw-pointer overload: records vkCmdCopyAccelerationStructureKHR.
// NOTE(review): declaration line 14179 elided in this listing.
14178  template <typename Dispatch>
14180  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14181  {
14182  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14183  d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
14184  }
14185 
14186 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14187  template <typename Dispatch>
14189  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14190  {
14191  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14192 
14193  d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
14194  }
14195 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14196 
// Raw-pointer overload: records vkCmdCopyAccelerationStructureToMemoryKHR.
// NOTE(review): declaration line 14198 elided in this listing.
14197  template <typename Dispatch>
14199  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14200  {
14201  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14202  d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
14203  }
14204 
14205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14206  template <typename Dispatch>
14208  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14209  {
14210  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14211 
14212  d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
14213  }
14214 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14215 
// Raw-pointer overload: records vkCmdCopyMemoryToAccelerationStructureKHR.
// NOTE(review): declaration line 14217 elided in this listing.
14216  template <typename Dispatch>
14218  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14219  {
14220  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14221  d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
14222  }
14223 
14224 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14225  template <typename Dispatch>
14227  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14228  {
14229  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14230 
14231  d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
14232  }
14233 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14234 
// Raw-pointer overload: queries the device address of an acceleration structure.
// NOTE(review): declaration line 14236 elided in this listing.
14235  template <typename Dispatch>
14237  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14238  {
14239  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14240  return static_cast<DeviceAddress>(
14241  d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
14242  }
14243 
14244 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14245  template <typename Dispatch>
14248  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14249  {
14250  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14251 
14252  VkDeviceAddress result =
14253  d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
14254 
14255  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
14256  }
14257 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14258 
14259  template <typename Dispatch>
14260  VULKAN_HPP_INLINE void
14261  CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
14262  const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
14265  uint32_t firstQuery,
14266  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14267  {
14268  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14269  d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
14270  accelerationStructureCount,
14271  reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
14272  static_cast<VkQueryType>( queryType ),
14273  static_cast<VkQueryPool>( queryPool ),
14274  firstQuery );
14275  }
14276 
14277 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14278  template <typename Dispatch>
14283  uint32_t firstQuery,
14284  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14285  {
14286  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14287 
14288  d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
14289  accelerationStructures.size(),
14290  reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
14291  static_cast<VkQueryType>( queryType ),
14292  static_cast<VkQueryPool>( queryPool ),
14293  firstQuery );
14294  }
14295 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14296 
// Raw-pointer overload: queries serialized-acceleration-structure version compatibility.
// NOTE(review): declaration lines 14298-14299 elided in this listing.
14297  template <typename Dispatch>
14300  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14301  {
14302  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14303  d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
14304  reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
14305  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
14306  }
14307 
14308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14309  template <typename Dispatch>
14312  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14313  {
14314  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14315 
14317  d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
14318  reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
14319  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
14320 
14321  return compatibility;
14322  }
14323 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14324 
// Raw-pointer overload: queries build/scratch sizes for an acceleration-structure build.
// NOTE(review): declaration lines 14326-14327/14329 elided in this listing.
14325  template <typename Dispatch>
14328  const uint32_t * pMaxPrimitiveCounts,
14330  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14331  {
14332  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14333  d.vkGetAccelerationStructureBuildSizesKHR( m_device,
14334  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
14335  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
14336  pMaxPrimitiveCounts,
14337  reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
14338  }
14339 
14340 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: validates maxPrimitiveCounts.size() == buildInfo.geometryCount (assert or
// LogicError), then returns the size info by value (local declared on elided line 14358).
14341  template <typename Dispatch>
14345  VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
14346  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
14347  {
14348  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14349 # ifdef VULKAN_HPP_NO_EXCEPTIONS
14350  VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
14351 # else
14352  if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
14353  {
14354  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
14355  }
14356 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
14357 
14359  d.vkGetAccelerationStructureBuildSizesKHR( m_device,
14360  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
14361  reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
14362  maxPrimitiveCounts.data(),
14363  reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
14364 
14365  return sizeInfo;
14366  }
14367 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14368 
14369  //=== VK_KHR_sampler_ycbcr_conversion ===
14370 
// Raw-pointer overload: forwards to vkCreateSamplerYcbcrConversionKHR.
// NOTE(review): declaration lines 14372-14373/14375 elided in this listing.
14371  template <typename Dispatch>
14374  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14376  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14377  {
14378  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14379  return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
14380  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
14381  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14382  reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
14383  }
14384 
14385 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: creates the conversion handle (local declared on elided line 14394),
// checks the result, and returns the wrapped handle.
14386  template <typename Dispatch>
14390  Dispatch const & d ) const
14391  {
14392  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14393 
14395  VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
14396  m_device,
14397  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
14398  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14399  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
14400  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
14401 
14402  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
14403  }
14404 
14405 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: the UniqueHandle-wrapping return expression (line 14424) is elided in
// this listing — presumably wraps with an ObjectDestroy deleter; confirm in the real header.
14406  template <typename Dispatch>
14410  Dispatch const & d ) const
14411  {
14412  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14413 
14415  VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
14416  m_device,
14417  reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
14418  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14419  reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
14420  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
14421 
14422  return createResultValueType(
14423  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14425  }
14426 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14427 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14428 
// Raw-pointer overload: forwards to vkDestroySamplerYcbcrConversionKHR.
// NOTE(review): declaration line 14430 elided in this listing.
14429  template <typename Dispatch>
14431  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14432  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14433  {
14434  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14435  d.vkDestroySamplerYcbcrConversionKHR(
14436  m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14437  }
14438 
14439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload with Optional allocator callbacks.
// NOTE(review): declaration lines 14441-14442 elided in this listing.
14440  template <typename Dispatch>
14443  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14444  {
14445  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14446 
14447  d.vkDestroySamplerYcbcrConversionKHR(
14448  m_device,
14449  static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
14450  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14451  }
14452 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14453 
14454  //=== VK_KHR_bind_memory2 ===
14455 
// Raw-pointer overload: forwards to vkBindBufferMemory2KHR.
// NOTE(review): declaration line 14457 elided in this listing.
14456  template <typename Dispatch>
14458  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
14459  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14460  {
14461  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14462  return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
14463  }
14464 
14465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload: binds all infos in one call, throws via resultCheck on failure.
// NOTE(review): declaration lines 14467-14468 elided in this listing.
14466  template <typename Dispatch>
14469  Dispatch const & d ) const
14470  {
14471  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14472 
14473  VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
14474  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
14475 
14476  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
14477  }
14478 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14479 
// Raw-pointer overload: forwards to vkBindImageMemory2KHR.
// NOTE(review): declaration line 14481 elided in this listing.
14480  template <typename Dispatch>
14482  const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
14483  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14484  {
14485  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14486  return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
14487  }
14488 
14489 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload: binds all image infos in one call, throws via resultCheck on failure.
// NOTE(review): declaration lines 14491-14492 elided in this listing.
14490  template <typename Dispatch>
14493  {
14494  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14495 
14496  VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
14497  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
14498 
14499  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
14500  }
14501 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14502 
// NOTE(review): doxygen listing — lines 14506-14507, 14516-14517 and 14521 (the
// signatures and the local `properties` declaration) were dropped by the HTML
// extraction; code kept byte-identical.
14503  //=== VK_EXT_image_drm_format_modifier ===
14504 
// Pointer-style query of an image's DRM format-modifier properties; writes
// through pProperties and returns the raw Result.
14505  template <typename Dispatch>
14508  {
14509  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14510  return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
14511  m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
14512  }
14513 
// Enhanced-mode form: fills a local `properties` (declared on missing line 14521)
// and returns it wrapped by createResultValueType after resultCheck.
14514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14515  template <typename Dispatch>
14518  {
14519  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14520 
14522  VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT(
14523  m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
14524  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
14525 
14526  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
14527  }
14528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14529 
// NOTE(review): doxygen listing — signature/declaration lines (14533, 14547-14549,
// 14554, 14567-14569, 14574, 14584) were dropped by the HTML extraction; code
// kept byte-identical.
14530  //=== VK_EXT_validation_cache ===
14531 
// Pointer-style creation of a validation cache: optional pAllocator is forwarded,
// the new handle is written through pValidationCache, raw Result returned.
14532  template <typename Dispatch>
14534  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14535  VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
14536  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14537  {
14538  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14539  return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
14540  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
14541  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14542  reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
14543  }
14544 
// Enhanced-mode form: `allocator` is an Optional<const AllocationCallbacks>
// (hence the static_cast before the reinterpret_cast) — TODO confirm against the
// missing signature line; returns the handle via createResultValueType.
14545 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14546  template <typename Dispatch>
14550  Dispatch const & d ) const
14551  {
14552  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14553 
14555  VkResult result = d.vkCreateValidationCacheEXT(
14556  m_device,
14557  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
14558  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14559  reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
14560  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
14561 
14562  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
14563  }
14564 
// Unique-handle variant: identical call sequence; missing line 14584 presumably
// wraps the handle in a UniqueHandle with an ObjectDestroy deleter.
14565 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14566  template <typename Dispatch>
14570  Dispatch const & d ) const
14571  {
14572  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14573 
14575  VkResult result = d.vkCreateValidationCacheEXT(
14576  m_device,
14577  reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
14578  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14579  reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
14580  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
14581 
14582  return createResultValueType(
14583  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14585  }
14586 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14587 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14588 
// NOTE(review): doxygen listing — signature lines (14590, 14601-14602, 14615,
// 14626-14627) dropped by the HTML extraction; code kept byte-identical.
// Four overloads of destroyValidationCacheEXT. The Vulkan-Hpp generator emits
// destroy(handle, pAllocator) and destroy(handle, Optional allocator) pairs
// twice (presumably once under the name `destroy` — signatures are on missing
// lines, confirm against the generated header); all forward to
// vkDestroyValidationCacheEXT and return nothing.
14589  template <typename Dispatch>
14591  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14592  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14593  {
14594  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14595  d.vkDestroyValidationCacheEXT(
14596  m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14597  }
14598 
14599 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14600  template <typename Dispatch>
14603  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14604  {
14605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14606 
14607  d.vkDestroyValidationCacheEXT(
14608  m_device,
14609  static_cast<VkValidationCacheEXT>( validationCache ),
14610  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14611  }
14612 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14613 
14614  template <typename Dispatch>
14616  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14617  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14618  {
14619  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14620  d.vkDestroyValidationCacheEXT(
14621  m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14622  }
14623 
14624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14625  template <typename Dispatch>
14628  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14629  {
14630  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14631 
14632  d.vkDestroyValidationCacheEXT(
14633  m_device,
14634  static_cast<VkValidationCacheEXT>( validationCache ),
14635  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14636  }
14637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14638 
// NOTE(review): doxygen listing — signature lines (14640, 14652-14654) dropped by
// the HTML extraction; code kept byte-identical.
// Merges srcCacheCount caches from pSrcCaches into dstCache; pointer form
// returns the raw Result.
14639  template <typename Dispatch>
14641  uint32_t srcCacheCount,
14642  const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
14643  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14644  {
14645  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14646  return static_cast<Result>( d.vkMergeValidationCachesEXT(
14647  m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
14648  }
14649 
// Enhanced-mode form: srcCaches is a container (presumably ArrayProxy) whose
// size()/data() supply the count and pointer; errors reported via resultCheck.
14650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14651  template <typename Dispatch>
14655  Dispatch const & d ) const
14656  {
14657  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14658 
14659  VkResult result = d.vkMergeValidationCachesEXT(
14660  m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
14661  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
14662 
14663  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
14664  }
14665 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14666 
// NOTE(review): doxygen listing — signature lines (14668, 14679-14680, 14709-14710)
// dropped by the HTML extraction; code kept byte-identical.
// Pointer form of the two-call size query: with pData == nullptr the driver
// writes the required size into *pDataSize; with a buffer it fills it.
14667  template <typename Dispatch>
14669  size_t * pDataSize,
14670  void * pData,
14671  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14672  {
14673  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14674  return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
14675  }
14676 
// Enhanced form: standard size-query/fetch loop — retries while VK_INCOMPLETE
// (the cache can grow between the two calls), then shrinks the vector if the
// final size came back smaller than the queried size.
14677 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14678  template <typename Uint8_tAllocator, typename Dispatch>
14681  {
14682  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14683 
14684  std::vector<uint8_t, Uint8_tAllocator> data;
14685  size_t dataSize;
14686  VkResult result;
14687  do
14688  {
14689  result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
14690  if ( ( result == VK_SUCCESS ) && dataSize )
14691  {
14692  data.resize( dataSize );
14693  result =
14694  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
14695  }
14696  } while ( result == VK_INCOMPLETE );
14697  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
14698  VULKAN_HPP_ASSERT( dataSize <= data.size() );
14699  if ( dataSize < data.size() )
14700  {
14701  data.resize( dataSize );
14702  }
14703  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
14704  }
14705 
// Allocator-taking overload: identical loop, but the result vector is
// constructed from the caller-supplied uint8_tAllocator. B1 is the SFINAE
// parameter constraining the allocator (constraint on missing line 14709).
14706  template <typename Uint8_tAllocator,
14707  typename Dispatch,
14708  typename B1,
14711  Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
14712  {
14713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14714 
14715  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
14716  size_t dataSize;
14717  VkResult result;
14718  do
14719  {
14720  result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
14721  if ( ( result == VK_SUCCESS ) && dataSize )
14722  {
14723  data.resize( dataSize );
14724  result =
14725  d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
14726  }
14727  } while ( result == VK_INCOMPLETE );
14728  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
14729  VULKAN_HPP_ASSERT( dataSize <= data.size() );
14730  if ( dataSize < data.size() )
14731  {
14732  data.resize( dataSize );
14733  }
14734  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
14735  }
14736 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14737 
// NOTE(review): doxygen listing — signature lines (14741-14742, 14750, 14762/14764,
// 14775, 14790-14791) dropped by the HTML extraction; code kept byte-identical.
14738  //=== VK_NV_shading_rate_image ===
14739 
// Binds imageView (in imageLayout) as the shading-rate image for this command
// buffer; void, fire-and-forget command recording.
14740  template <typename Dispatch>
14743  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14744  {
14745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14746  d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
14747  }
14748 
// Pointer form: sets viewportCount shading-rate palettes starting at firstViewport.
14749  template <typename Dispatch>
14751  uint32_t viewportCount,
14752  const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
14753  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14754  {
14755  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14756  d.vkCmdSetViewportShadingRatePaletteNV(
14757  m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
14758  }
14759 
// Enhanced form: shadingRatePalettes is a container (presumably ArrayProxy)
// supplying count and data.
14760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14761  template <typename Dispatch>
14763  uint32_t firstViewport,
14765  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14766  {
14767  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14768 
14769  d.vkCmdSetViewportShadingRatePaletteNV(
14770  m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
14771  }
14772 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14773 
// Pointer form: programs custom coarse-sample orderings for the given order type.
14774  template <typename Dispatch>
14776  uint32_t customSampleOrderCount,
14777  const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
14778  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14779  {
14780  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14781  d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
14782  static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
14783  customSampleOrderCount,
14784  reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
14785  }
14786 
// Enhanced form of the same command, container-based.
14787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14788  template <typename Dispatch>
14789  VULKAN_HPP_INLINE void
14792  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14793  {
14794  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14795 
14796  d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
14797  static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
14798  customSampleOrders.size(),
14799  reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
14800  }
14801 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14802 
// NOTE(review): doxygen listing — signature lines (14806-14807, 14821-14823,
// 14841-14843, 14858) dropped by the HTML extraction; code kept byte-identical.
14803  //=== VK_NV_ray_tracing ===
14804 
// Pointer form: creates an NV acceleration structure, writing the handle through
// pAccelerationStructure; returns the raw Result.
14805  template <typename Dispatch>
14808  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14809  VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
14810  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14811  {
14812  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14813  return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
14814  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
14815  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14816  reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
14817  }
14818 
// Enhanced form: returns the handle through createResultValueType after
// resultCheck; `allocator` is an Optional wrapper (hence the double cast).
14819 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14820  template <typename Dispatch>
14824  Dispatch const & d ) const
14825  {
14826  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14827 
14828  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
14829  VkResult result = d.vkCreateAccelerationStructureNV(
14830  m_device,
14831  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
14832  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14833  reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
14834  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
14835 
14836  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
14837  }
14838 
// Unique-handle variant: same call; missing line 14858 presumably wraps the
// handle in a UniqueHandle with an ObjectDestroy deleter.
14839 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14840  template <typename Dispatch>
14844  Dispatch const & d ) const
14845  {
14846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14847 
14848  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
14849  VkResult result = d.vkCreateAccelerationStructureNV(
14850  m_device,
14851  reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
14852  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14853  reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
14854  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
14855 
14856  return createResultValueType(
14857  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14859  }
14860 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
14861 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14862 
// NOTE(review): doxygen listing — signature lines (14864, 14875-14876, 14889,
// 14900-14901) dropped by the HTML extraction; code kept byte-identical.
// Four destroy overloads (raw-allocator and Optional-allocator pairs; second
// pair presumably the generic `destroy` spelling — signatures are on missing
// lines, confirm against the generated header). All forward to
// vkDestroyAccelerationStructureNV.
14863  template <typename Dispatch>
14865  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14866  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14867  {
14868  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14869  d.vkDestroyAccelerationStructureNV(
14870  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14871  }
14872 
14873 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14874  template <typename Dispatch>
14877  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14878  {
14879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14880 
14881  d.vkDestroyAccelerationStructureNV(
14882  m_device,
14883  static_cast<VkAccelerationStructureNV>( accelerationStructure ),
14884  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14885  }
14886 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14887 
14888  template <typename Dispatch>
14890  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
14891  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14892  {
14893  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14894  d.vkDestroyAccelerationStructureNV(
14895  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14896  }
14897 
14898 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14899  template <typename Dispatch>
14902  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14903  {
14904  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14905 
14906  d.vkDestroyAccelerationStructureNV(
14907  m_device,
14908  static_cast<VkAccelerationStructureNV>( accelerationStructure ),
14909  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14910  }
14911 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14912 
// NOTE(review): doxygen listing — signature lines (14915, 14927-14928, 14933,
// 14942-14943) dropped by the HTML extraction; code kept byte-identical.
// Pointer form: fills *pMemoryRequirements from the info struct; void return.
14913  template <typename Dispatch>
14914  VULKAN_HPP_INLINE void
14916  VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
14917  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14918  {
14919  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14920  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14921  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
14922  reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
14923  }
14924 
// Enhanced form: returns a MemoryRequirements2KHR by value (declared on missing
// line 14933).
14925 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14926  template <typename Dispatch>
14929  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14930  {
14931  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14932 
14934  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14935  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
14936  reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
14937 
14938  return memoryRequirements;
14939  }
14940 
// StructureChain form: writes into the MemoryRequirements2KHR link of a
// caller-specified pNext chain and returns the whole chain.
14941  template <typename X, typename Y, typename... Z, typename Dispatch>
14944  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14945  {
14946  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14947 
14948  StructureChain<X, Y, Z...> structureChain;
14949  VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
14950  d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14951  reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
14952  reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
14953 
14954  return structureChain;
14955  }
14956 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14957 
// NOTE(review): doxygen listing — signature lines (14959, 14969-14970) dropped by
// the HTML extraction; code kept byte-identical.
// Pointer form: binds memory for bindInfoCount acceleration structures; returns
// the raw Result.
14958  template <typename Dispatch>
14960  uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14961  {
14962  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14963  return static_cast<Result>(
14964  d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
14965  }
14966 
// Enhanced form: bindInfos is a container (presumably ArrayProxy); errors
// surface through resultCheck.
14967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14968  template <typename Dispatch>
14971  {
14972  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14973 
14974  VkResult result = d.vkBindAccelerationStructureMemoryNV(
14975  m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
14976  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
14977 
14978  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
14979  }
14980 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
14981 
// NOTE(review): doxygen listing — lines 14983, 14986-14989, 15007, 15010-15013
// (signature start and the update/dst/src/scratch parameters) dropped by the
// HTML extraction; code kept byte-identical.
// Records an NV acceleration-structure build: info describes the geometry,
// instanceData/instanceOffset locate instance data, update selects rebuild vs
// refit into dst (reading src), and scratch/scratchOffset supply scratch memory.
14982  template <typename Dispatch>
14984  VULKAN_HPP_NAMESPACE::Buffer instanceData,
14985  VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
14990  VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
14991  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
14992  {
14993  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
14994  d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
14995  reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
14996  static_cast<VkBuffer>( instanceData ),
14997  static_cast<VkDeviceSize>( instanceOffset ),
14998  static_cast<VkBool32>( update ),
14999  static_cast<VkAccelerationStructureNV>( dst ),
15000  static_cast<VkAccelerationStructureNV>( src ),
15001  static_cast<VkBuffer>( scratch ),
15002  static_cast<VkDeviceSize>( scratchOffset ) );
15003  }
15004 
// Enhanced-mode overload: identical call, but takes `info` by reference instead
// of pointer.
15005 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15006  template <typename Dispatch>
15008  VULKAN_HPP_NAMESPACE::Buffer instanceData,
15009  VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
15014  VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
15015  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15016  {
15017  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15018 
15019  d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
15020  reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
15021  static_cast<VkBuffer>( instanceData ),
15022  static_cast<VkDeviceSize>( instanceOffset ),
15023  static_cast<VkBool32>( update ),
15024  static_cast<VkAccelerationStructureNV>( dst ),
15025  static_cast<VkAccelerationStructureNV>( src ),
15026  static_cast<VkBuffer>( scratch ),
15027  static_cast<VkDeviceSize>( scratchOffset ) );
15028  }
15029 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15030 
// NOTE(review): doxygen listing — lines 15032-15034 and 15045 (the signatures of
// copyAccelerationStructureNV and traceRaysNV) dropped by the HTML extraction;
// code kept byte-identical.
// Records a copy of src into dst using the given copy mode.
15031  template <typename Dispatch>
15035  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15036  {
15037  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15038  d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
15039  static_cast<VkAccelerationStructureNV>( dst ),
15040  static_cast<VkAccelerationStructureNV>( src ),
15041  static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
15042  }
15043 
// Records a ray-trace dispatch of width x height x depth rays; the four
// buffer/offset(/stride) triples locate the raygen, miss, hit and callable
// shader-binding-table regions.
15044  template <typename Dispatch>
15046  VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
15047  VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
15048  VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
15049  VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
15050  VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
15051  VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
15052  VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
15053  VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
15054  VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
15055  VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
15056  uint32_t width,
15057  uint32_t height,
15058  uint32_t depth,
15059  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15060  {
15061  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15062  d.vkCmdTraceRaysNV( m_commandBuffer,
15063  static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
15064  static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
15065  static_cast<VkBuffer>( missShaderBindingTableBuffer ),
15066  static_cast<VkDeviceSize>( missShaderBindingOffset ),
15067  static_cast<VkDeviceSize>( missShaderBindingStride ),
15068  static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
15069  static_cast<VkDeviceSize>( hitShaderBindingOffset ),
15070  static_cast<VkDeviceSize>( hitShaderBindingStride ),
15071  static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
15072  static_cast<VkDeviceSize>( callableShaderBindingOffset ),
15073  static_cast<VkDeviceSize>( callableShaderBindingStride ),
15074  width,
15075  height,
15076  depth );
15077  }
15078 
// NOTE(review): doxygen listing — signature/declaration lines (15080, 15082,
// 15098-15101, 15116, 15118, 15124-15128, 15144, 15146, 15150-15153, 15158,
// 15168, 15170) dropped by the HTML extraction; code kept byte-identical.
// Pointer form: batch-creates createInfoCount NV ray-tracing pipelines into
// pPipelines, optionally seeded from pipelineCache; returns the raw Result.
15079  template <typename Dispatch>
15081  uint32_t createInfoCount,
15083  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
15084  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
15085  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15086  {
15087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15088  return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
15089  static_cast<VkPipelineCache>( pipelineCache ),
15090  createInfoCount,
15091  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
15092  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
15093  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
15094  }
15095 
// Enhanced form: returns a vector of pipelines; missing line 15116 presumably
// lists the accepted success codes (ePipelineCompileRequiredEXT) for resultCheck.
15096 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15097  template <typename PipelineAllocator, typename Dispatch>
15102  Dispatch const & d ) const
15103  {
15104  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15105 
15106  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
15107  VkResult result = d.vkCreateRayTracingPipelinesNV(
15108  m_device,
15109  static_cast<VkPipelineCache>( pipelineCache ),
15110  createInfos.size(),
15111  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
15112  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15113  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
15114  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15115  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
15117 
15119  }
15120 
// Allocator-taking overload: same call with a caller-supplied PipelineAllocator;
// B0 is the SFINAE constraint parameter (constraint on a missing line).
15121  template <typename PipelineAllocator,
15122  typename Dispatch,
15123  typename B0,
15129  PipelineAllocator & pipelineAllocator,
15130  Dispatch const & d ) const
15131  {
15132  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15133 
15134  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
15135  VkResult result = d.vkCreateRayTracingPipelinesNV(
15136  m_device,
15137  static_cast<VkPipelineCache>( pipelineCache ),
15138  createInfos.size(),
15139  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
15140  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15141  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
15142  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15143  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
15145 
15147  }
15148 
// Single-pipeline convenience: count hard-wired to 1, returns one Pipeline.
15149  template <typename Dispatch>
15154  Dispatch const & d ) const
15155  {
15156  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15157 
15159  VkResult result = d.vkCreateRayTracingPipelinesNV(
15160  m_device,
15161  static_cast<VkPipelineCache>( pipelineCache ),
15162  1,
15163  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
15164  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15165  reinterpret_cast<VkPipeline *>( &pipeline ) );
15166  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15167  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
15169 
15171  }
// NOTE(review): doxygen listing — signature/declaration lines (15175-15178, 15193,
// 15201, 15209-15212, 15228, 15236, 15241-15244, 15249, 15259, 15261, 15263)
// dropped by the HTML extraction; code kept byte-identical.
// Unique-handle variants: create the pipelines, then wrap each raw handle in a
// UniqueHandle sharing one ObjectDestroy deleter built from (*this, allocator, d).
15173 # ifndef VULKAN_HPP_NO_SMART_HANDLE
15174  template <typename Dispatch, typename PipelineAllocator>
15179  Dispatch const & d ) const
15180  {
15181  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15182 
15183  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
15184  VkResult result = d.vkCreateRayTracingPipelinesNV(
15185  m_device,
15186  static_cast<VkPipelineCache>( pipelineCache ),
15187  createInfos.size(),
15188  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
15189  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15190  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
15191  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15192  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
15194  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
15195  uniquePipelines.reserve( createInfos.size() );
15196  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
15197  for ( auto const & pipeline : pipelines )
15198  {
15199  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
15200  }
15202  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
15203  }
15204 
// Allocator-taking unique variant: B0 constrains PipelineAllocator to allocate
// UniqueHandle<Pipeline, Dispatch>.
15205  template <typename Dispatch,
15206  typename PipelineAllocator,
15207  typename B0,
15208  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
15213  PipelineAllocator & pipelineAllocator,
15214  Dispatch const & d ) const
15215  {
15216  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15217 
15218  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
15219  VkResult result = d.vkCreateRayTracingPipelinesNV(
15220  m_device,
15221  static_cast<VkPipelineCache>( pipelineCache ),
15222  createInfos.size(),
15223  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
15224  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15225  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
15226  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15227  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
15229  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
15230  uniquePipelines.reserve( createInfos.size() );
15231  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
15232  for ( auto const & pipeline : pipelines )
15233  {
15234  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
15235  }
15237  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
15238  }
15239 
// Single-pipeline unique convenience: count hard-wired to 1; missing line 15263
// presumably wraps `pipeline` into the returned UniqueHandle.
15240  template <typename Dispatch>
15245  Dispatch const & d ) const
15246  {
15247  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15248 
15250  VkResult result = d.vkCreateRayTracingPipelinesNV(
15251  m_device,
15252  static_cast<VkPipelineCache>( pipelineCache ),
15253  1,
15254  reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
15255  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15256  reinterpret_cast<VkPipeline *>( &pipeline ) );
15257  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15258  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
15260 
15262  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15264  }
15265 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
15266 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15267 
// NOTE(review): doxygen listing — signature lines (15269, 15283, 15298) dropped
// by the HTML extraction; code kept byte-identical.
// Pointer form: copies dataSize bytes of shader-group handles (groups
// [firstGroup, firstGroup+groupCount)) into pData; returns the raw Result.
15268  template <typename Dispatch>
15270  uint32_t firstGroup,
15271  uint32_t groupCount,
15272  size_t dataSize,
15273  void * pData,
15274  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15275  {
15276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15277  return static_cast<Result>(
15278  d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
15279  }
15280 
// Typed-vector form: dataSize must be a multiple of sizeof(DataType) (asserted);
// returns the handles reinterpreted as a vector of DataType.
15281 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15282  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
15284  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
15285  {
15286  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15287 
15288  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
15289  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
15290  VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
15291  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
15292  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
15293 
15294  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15295  }
15296 
// Single-value form: fetches exactly sizeof(DataType) bytes into one DataType.
15297  template <typename DataType, typename Dispatch>
15299  Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
15300  {
15301  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15302 
15303  DataType data;
15304  VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
15305  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
15306  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
15307 
15308  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15309  }
15310 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15311 
15312  template <typename Dispatch>
15314  size_t dataSize,
15315  void * pData,
15316  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15317  {
15318  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15319  return static_cast<Result>(
15320  d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
15321  }
15322 
15323 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15324  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
15326  Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
15327  {
15328  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15329 
15330  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
15331  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
15332  VkResult result = d.vkGetAccelerationStructureHandleNV(
15333  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
15334  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
15335 
15336  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15337  }
15338 
15339  template <typename DataType, typename Dispatch>
15342  {
15343  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15344 
15345  DataType data;
15346  VkResult result = d.vkGetAccelerationStructureHandleNV(
15347  m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
15348  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
15349 
15350  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15351  }
15352 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15353 
15354  template <typename Dispatch>
15356  const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
15359  uint32_t firstQuery,
15360  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15361  {
15362  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15363  d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
15364  accelerationStructureCount,
15365  reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
15366  static_cast<VkQueryType>( queryType ),
15367  static_cast<VkQueryPool>( queryPool ),
15368  firstQuery );
15369  }
15370 
15371 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15372  template <typename Dispatch>
15377  uint32_t firstQuery,
15378  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15379  {
15380  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15381 
15382  d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
15383  accelerationStructures.size(),
15384  reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
15385  static_cast<VkQueryType>( queryType ),
15386  static_cast<VkQueryPool>( queryPool ),
15387  firstQuery );
15388  }
15389 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15390 
15391 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
15392  template <typename Dispatch>
15394  uint32_t shader,
15395  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15396  {
15397  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15398  return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
15399  }
15400 #else
15401  template <typename Dispatch>
15403  Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
15404  {
15405  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15406 
15407  VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
15408  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
15409 
15410  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
15411  }
15412 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
15413 
15414  //=== VK_KHR_maintenance3 ===
15415 
15416  template <typename Dispatch>
15419  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15420  {
15421  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15422  d.vkGetDescriptorSetLayoutSupportKHR(
15423  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
15424  }
15425 
15426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15427  template <typename Dispatch>
15430  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15431  {
15432  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15433 
15435  d.vkGetDescriptorSetLayoutSupportKHR(
15436  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
15437 
15438  return support;
15439  }
15440 
15441  template <typename X, typename Y, typename... Z, typename Dispatch>
15444  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15445  {
15446  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15447 
15448  StructureChain<X, Y, Z...> structureChain;
15449  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
15450  d.vkGetDescriptorSetLayoutSupportKHR(
15451  m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
15452 
15453  return structureChain;
15454  }
15455 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15456 
15457  //=== VK_KHR_draw_indirect_count ===
15458 
15459  template <typename Dispatch>
15462  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
15463  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
15464  uint32_t maxDrawCount,
15465  uint32_t stride,
15466  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15467  {
15468  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15469  d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
15470  static_cast<VkBuffer>( buffer ),
15471  static_cast<VkDeviceSize>( offset ),
15472  static_cast<VkBuffer>( countBuffer ),
15473  static_cast<VkDeviceSize>( countBufferOffset ),
15474  maxDrawCount,
15475  stride );
15476  }
15477 
15478  template <typename Dispatch>
15481  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
15482  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
15483  uint32_t maxDrawCount,
15484  uint32_t stride,
15485  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15486  {
15487  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15488  d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
15489  static_cast<VkBuffer>( buffer ),
15490  static_cast<VkDeviceSize>( offset ),
15491  static_cast<VkBuffer>( countBuffer ),
15492  static_cast<VkDeviceSize>( countBufferOffset ),
15493  maxDrawCount,
15494  stride );
15495  }
15496 
15497  //=== VK_EXT_external_memory_host ===
15498 
15499  template <typename Dispatch>
15502  const void * pHostPointer,
15503  VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
15504  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15505  {
15506  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15507  return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
15508  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
15509  pHostPointer,
15510  reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
15511  }
15512 
15513 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15514  template <typename Dispatch>
15517  const void * pHostPointer,
15518  Dispatch const & d ) const
15519  {
15520  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15521 
15522  VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
15523  VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device,
15524  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
15525  pHostPointer,
15526  reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
15527  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
15528 
15529  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
15530  }
15531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15532 
15533  //=== VK_AMD_buffer_marker ===
15534 
15535  template <typename Dispatch>
15537  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
15539  uint32_t marker,
15540  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15541  {
15542  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15543  d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
15544  static_cast<VkPipelineStageFlagBits>( pipelineStage ),
15545  static_cast<VkBuffer>( dstBuffer ),
15546  static_cast<VkDeviceSize>( dstOffset ),
15547  marker );
15548  }
15549 
15550  //=== VK_EXT_calibrated_timestamps ===
15551 
15552  template <typename Dispatch>
15554  VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
15555  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15556  {
15557  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15558  return static_cast<Result>(
15559  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
15560  }
15561 
15562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15563  template <typename TimeDomainEXTAllocator, typename Dispatch>
15566  {
15567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15568 
15569  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
15570  uint32_t timeDomainCount;
15571  VkResult result;
15572  do
15573  {
15574  result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
15575  if ( ( result == VK_SUCCESS ) && timeDomainCount )
15576  {
15577  timeDomains.resize( timeDomainCount );
15578  result =
15579  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
15580  }
15581  } while ( result == VK_INCOMPLETE );
15582  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
15583  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
15584  if ( timeDomainCount < timeDomains.size() )
15585  {
15586  timeDomains.resize( timeDomainCount );
15587  }
15588  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
15589  }
15590 
15591  template <typename TimeDomainEXTAllocator,
15592  typename Dispatch,
15593  typename B1,
15596  PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
15597  {
15598  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15599 
15600  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
15601  uint32_t timeDomainCount;
15602  VkResult result;
15603  do
15604  {
15605  result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
15606  if ( ( result == VK_SUCCESS ) && timeDomainCount )
15607  {
15608  timeDomains.resize( timeDomainCount );
15609  result =
15610  d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
15611  }
15612  } while ( result == VK_INCOMPLETE );
15613  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
15614  VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
15615  if ( timeDomainCount < timeDomains.size() )
15616  {
15617  timeDomains.resize( timeDomainCount );
15618  }
15619  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
15620  }
15621 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15622 
15623  template <typename Dispatch>
15625  const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
15626  uint64_t * pTimestamps,
15627  uint64_t * pMaxDeviation,
15628  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15629  {
15630  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15631  return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
15632  m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
15633  }
15634 
15635 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15636  template <typename Uint64_tAllocator, typename Dispatch>
15639  Dispatch const & d ) const
15640  {
15641  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15642 
15643  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
15644  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
15645  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
15646  uint64_t & maxDeviation = data.second;
15647  VkResult result = d.vkGetCalibratedTimestampsEXT(
15648  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
15649  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
15650 
15651  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15652  }
15653 
15654  template <typename Uint64_tAllocator,
15655  typename Dispatch,
15656  typename B0,
15660  Uint64_tAllocator & uint64_tAllocator,
15661  Dispatch const & d ) const
15662  {
15663  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15664 
15665  std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
15666  std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
15667  std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
15668  uint64_t & maxDeviation = data.second;
15669  VkResult result = d.vkGetCalibratedTimestampsEXT(
15670  m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
15671  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
15672 
15673  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15674  }
15675 
15676  template <typename Dispatch>
15679  {
15680  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15681 
15682  std::pair<uint64_t, uint64_t> data;
15683  uint64_t & timestamp = data.first;
15684  uint64_t & maxDeviation = data.second;
15685  VkResult result =
15686  d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
15687  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
15688 
15689  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
15690  }
15691 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15692 
15693  //=== VK_NV_mesh_shader ===
15694 
  // VK_NV_mesh_shader: records a mesh-task draw into this command buffer by
  // forwarding straight to vkCmdDrawMeshTasksNV through the dispatcher d.
  // taskCount / firstTask are passed through unchanged (their semantics are
  // defined by the vkCmdDrawMeshTasksNV specification); nothing is returned.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Debug-build check that the dispatcher's function pointers were loaded
    // against the same Vulkan header version this wrapper was compiled with.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  }
15701 
15702  template <typename Dispatch>
15705  uint32_t drawCount,
15706  uint32_t stride,
15707  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15708  {
15709  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15710  d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
15711  }
15712 
15713  template <typename Dispatch>
15716  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
15717  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
15718  uint32_t maxDrawCount,
15719  uint32_t stride,
15720  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15721  {
15722  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15723  d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
15724  static_cast<VkBuffer>( buffer ),
15725  static_cast<VkDeviceSize>( offset ),
15726  static_cast<VkBuffer>( countBuffer ),
15727  static_cast<VkDeviceSize>( countBufferOffset ),
15728  maxDrawCount,
15729  stride );
15730  }
15731 
15732  //=== VK_NV_scissor_exclusive ===
15733 
  // VK_NV_scissor_exclusive: sets exclusiveScissorCount exclusive-scissor
  // rectangles starting at index firstExclusiveScissor on this command buffer.
  // The C++ Rect2D array is reinterpreted in place as the layout-compatible
  // C VkRect2D array and handed to vkCmdSetExclusiveScissorNV unmodified.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
                                                               uint32_t exclusiveScissorCount,
                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the compiled-in Vulkan header version (debug only).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  }
15743 
15744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15745  template <typename Dispatch>
15746  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
15748  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15749  {
15750  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15751 
15752  d.vkCmdSetExclusiveScissorNV(
15753  m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
15754  }
15755 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15756 
15757  //=== VK_NV_device_diagnostic_checkpoints ===
15758 
  // VK_NV_device_diagnostic_checkpoints: inserts a diagnostic checkpoint into
  // this command buffer. The opaque pCheckpointMarker pointer is forwarded to
  // vkCmdSetCheckpointNV as-is; it is later reported back by
  // vkGetQueueCheckpointDataNV, so the caller is responsible for its lifetime
  // and meaning — this wrapper never dereferences it.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the compiled-in Vulkan header version (debug only).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }
15765 
15766 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode convenience overload of setCheckpointNV: accepts any marker
  // object by reference and passes its address to vkCmdSetCheckpointNV as the
  // opaque checkpoint marker. NOTE(review): only the pointer is recorded here,
  // so checkpointMarker must outlive any later query of the checkpoint data —
  // a temporary bound to this const reference would dangle; confirm at call sites.
  template <typename CheckpointMarkerType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the compiled-in Vulkan header version (debug only).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
  }
15774 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15775 
  // VK_NV_device_diagnostic_checkpoints: raw pointer-style query of the
  // checkpoint data recorded on this queue. Follows the standard Vulkan
  // enumerate pattern: *pCheckpointDataCount is both the capacity provided by
  // the caller and, on return, the number of entries written (or available,
  // when pCheckpointData is null). The C++ output array is reinterpreted in
  // place as the layout-compatible C VkCheckpointDataNV array.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the compiled-in Vulkan header version (debug only).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
  }
15784 
15785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15786  template <typename CheckpointDataNVAllocator, typename Dispatch>
15787  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
15788  Queue::getCheckpointDataNV( Dispatch const & d ) const
15789  {
15790  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15791 
15792  std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
15793  uint32_t checkpointDataCount;
15794  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
15795  checkpointData.resize( checkpointDataCount );
15796  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
15797 
15798  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
15799  if ( checkpointDataCount < checkpointData.size() )
15800  {
15801  checkpointData.resize( checkpointDataCount );
15802  }
15803  return checkpointData;
15804  }
15805 
15806  template <typename CheckpointDataNVAllocator,
15807  typename Dispatch,
15808  typename B1,
15810  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
15811  Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
15812  {
15813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15814 
15815  std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
15816  uint32_t checkpointDataCount;
15817  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
15818  checkpointData.resize( checkpointDataCount );
15819  d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
15820 
15821  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
15822  if ( checkpointDataCount < checkpointData.size() )
15823  {
15824  checkpointData.resize( checkpointDataCount );
15825  }
15826  return checkpointData;
15827  }
15828 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15829 
15830  //=== VK_KHR_timeline_semaphore ===
15831 
15832  template <typename Dispatch>
15834  uint64_t * pValue,
15835  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15836  {
15837  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15838  return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
15839  }
15840 
15841 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15842  template <typename Dispatch>
15845  {
15846  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15847 
15848  uint64_t value;
15849  VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
15850  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
15851 
15852  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
15853  }
15854 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15855 
15856  template <typename Dispatch>
15858  uint64_t timeout,
15859  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15860  {
15861  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15862  return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
15863  }
15864 
15865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15866  template <typename Dispatch>
15868  Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
15869  {
15870  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15871 
15872  VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
15873  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15874  VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
15876 
15877  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
15878  }
15879 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15880 
15881  template <typename Dispatch>
15883  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15884  {
15885  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15886  return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
15887  }
15888 
15889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15890  template <typename Dispatch>
15892  Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
15893  {
15894  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15895 
15896  VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
15897  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
15898 
15899  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
15900  }
15901 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15902 
15903  //=== VK_INTEL_performance_query ===
15904 
15905  template <typename Dispatch>
15907  const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15908  {
15909  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15910  return static_cast<Result>(
15911  d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
15912  }
15913 
15914 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15915  template <typename Dispatch>
15918  {
15919  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15920 
15921  VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
15922  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
15923 
15924  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
15925  }
15926 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15927 
15928  template <typename Dispatch>
15930  {
15931  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15932  d.vkUninitializePerformanceApiINTEL( m_device );
15933  }
15934 
15935  template <typename Dispatch>
15937  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15938  {
15939  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15940  return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
15941  }
15942 
15943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15944  template <typename Dispatch>
15947  {
15948  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15949 
15950  VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
15951  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
15952 
15953  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
15954  }
15955 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15956 
15957  template <typename Dispatch>
15959  const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15960  {
15961  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15962  return static_cast<Result>(
15963  d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
15964  }
15965 
15966 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15967  template <typename Dispatch>
15970  {
15971  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15972 
15973  VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
15974  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
15975 
15976  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
15977  }
15978 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
15979 
15980  template <typename Dispatch>
15982  const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
15983  {
15984  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15985  return static_cast<Result>(
15986  d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
15987  }
15988 
15989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15990  template <typename Dispatch>
15993  {
15994  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
15995 
15996  VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
15997  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
15998 
15999  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16000  }
16001 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16002 
16003  template <typename Dispatch>
16007  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16008  {
16009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16010  return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
16011  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
16012  reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
16013  }
16014 
16015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16016  template <typename Dispatch>
16019  {
16020  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16021 
16023  VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
16024  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
16025  reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
16026  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
16027 
16028  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
16029  }
16030 
16031 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16032  template <typename Dispatch>
16035  Dispatch const & d ) const
16036  {
16037  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16038 
16040  VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
16041  reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
16042  reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
16043  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
16044 
16045  return createResultValueType(
16046  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16048  }
16049 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16050 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16051 
16052 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16053  template <typename Dispatch>
16055  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16056  {
16057  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16058  return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16059  }
16060 #else
16061  template <typename Dispatch>
16064  {
16065  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16066 
16067  VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
16068  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
16069 
16070  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16071  }
16072 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16073 
16074 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16075  template <typename Dispatch>
16077  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16078  {
16079  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16080  return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16081  }
16082 #else
16083  template <typename Dispatch>
16086  {
16087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16088 
16089  VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
16090  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
16091 
16092  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16093  }
16094 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16095 
16096 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16097  template <typename Dispatch>
16099  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16100  {
16101  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16102  return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16103  }
16104 #else
16105  template <typename Dispatch>
16108  {
16109  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16110 
16111  VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
16112  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
16113 
16114  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16115  }
16116 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16117 
16118  template <typename Dispatch>
16121  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16122  {
16123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16124  return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
16125  m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
16126  }
16127 
16128 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16129  template <typename Dispatch>
16132  {
16133  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16134 
16136  VkResult result = d.vkGetPerformanceParameterINTEL(
16137  m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
16138  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
16139 
16140  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
16141  }
16142 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16143 
16144  //=== VK_AMD_display_native_hdr ===
16145 
16146  template <typename Dispatch>
16148  VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
16149  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16150  {
16151  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16152  d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
16153  }
16154 
16155 #if defined( VK_USE_PLATFORM_FUCHSIA )
16156  //=== VK_FUCHSIA_imagepipe_surface ===
16157 
16158  template <typename Dispatch>
16160  Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
16161  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16163  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16164  {
16165  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16166  return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
16167  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
16168  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16169  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
16170  }
16171 
16172 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16173  template <typename Dispatch>
16175  Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
16176  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16177  Dispatch const & d ) const
16178  {
16179  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16180 
16182  VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
16183  m_instance,
16184  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
16185  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16186  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16187  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
16188 
16189  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
16190  }
16191 
16192 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16193  template <typename Dispatch>
16194  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
16195  Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
16196  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16197  Dispatch const & d ) const
16198  {
16199  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16200 
16202  VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
16203  m_instance,
16204  reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
16205  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16206  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16207  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
16208 
16209  return createResultValueType(
16210  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16211  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
16212  }
16213 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16214 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16215 #endif /*VK_USE_PLATFORM_FUCHSIA*/
16216 
16217 #if defined( VK_USE_PLATFORM_METAL_EXT )
16218  //=== VK_EXT_metal_surface ===
16219 
16220  template <typename Dispatch>
16221  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
16222  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16224  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16225  {
16226  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16227  return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
16228  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
16229  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16230  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
16231  }
16232 
16233 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16234  template <typename Dispatch>
16236  Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
16237  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16238  Dispatch const & d ) const
16239  {
16240  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16241 
16243  VkResult result =
16244  d.vkCreateMetalSurfaceEXT( m_instance,
16245  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
16246  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16247  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16248  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
16249 
16250  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
16251  }
16252 
16253 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16254  template <typename Dispatch>
16255  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
16256  Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
16257  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16258  Dispatch const & d ) const
16259  {
16260  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16261 
16263  VkResult result =
16264  d.vkCreateMetalSurfaceEXT( m_instance,
16265  reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
16266  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16267  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16268  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
16269 
16270  return createResultValueType(
16271  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16272  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
16273  }
16274 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16275 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16276 #endif /*VK_USE_PLATFORM_METAL_EXT*/
16277 
16278  //=== VK_KHR_fragment_shading_rate ===
16279 
16280  template <typename Dispatch>
16282  PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
16284  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16285  {
16286  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16287  return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16288  m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
16289  }
16290 
16291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16292  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
16296  {
16297  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16298 
16299  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
16300  uint32_t fragmentShadingRateCount;
16301  VkResult result;
16302  do
16303  {
16304  result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
16305  if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
16306  {
16307  fragmentShadingRates.resize( fragmentShadingRateCount );
16308  result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16309  m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
16310  }
16311  } while ( result == VK_INCOMPLETE );
16312  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
16313  VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
16314  if ( fragmentShadingRateCount < fragmentShadingRates.size() )
16315  {
16316  fragmentShadingRates.resize( fragmentShadingRateCount );
16317  }
16318  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
16319  }
16320 
16321  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
16322  typename Dispatch,
16323  typename B1,
16327  PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
16328  Dispatch const & d ) const
16329  {
16330  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16331 
16332  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
16333  physicalDeviceFragmentShadingRateKHRAllocator );
16334  uint32_t fragmentShadingRateCount;
16335  VkResult result;
16336  do
16337  {
16338  result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
16339  if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
16340  {
16341  fragmentShadingRates.resize( fragmentShadingRateCount );
16342  result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16343  m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
16344  }
16345  } while ( result == VK_INCOMPLETE );
16346  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
16347  VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
16348  if ( fragmentShadingRateCount < fragmentShadingRates.size() )
16349  {
16350  fragmentShadingRates.resize( fragmentShadingRateCount );
16351  }
16352  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
16353  }
16354 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16355 
16356  template <typename Dispatch>
16359  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16360  {
16361  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16362  d.vkCmdSetFragmentShadingRateKHR(
16363  m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
16364  }
16365 
16366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16367  template <typename Dispatch>
16370  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16371  {
16372  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16373 
16374  d.vkCmdSetFragmentShadingRateKHR(
16375  m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
16376  }
16377 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16378 
16379  //=== VK_EXT_buffer_device_address ===
16380 
16381  template <typename Dispatch>
16383  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16384  {
16385  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16386  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
16387  }
16388 
16389 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16390  template <typename Dispatch>
16392  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16393  {
16394  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16395 
16396  VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
16397 
16398  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
16399  }
16400 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16401 
16402  //=== VK_EXT_tooling_info ===
16403 
16404  template <typename Dispatch>
16407  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16408  {
16409  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16410  return static_cast<Result>(
16411  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
16412  }
16413 
16414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16415  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
16418  PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
16419  {
16420  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16421 
16422  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
16423  uint32_t toolCount;
16424  VkResult result;
16425  do
16426  {
16427  result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
16428  if ( ( result == VK_SUCCESS ) && toolCount )
16429  {
16430  toolProperties.resize( toolCount );
16431  result =
16432  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
16433  }
16434  } while ( result == VK_INCOMPLETE );
16435  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
16436  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
16437  if ( toolCount < toolProperties.size() )
16438  {
16439  toolProperties.resize( toolCount );
16440  }
16441  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
16442  }
16443 
16444  template <typename PhysicalDeviceToolPropertiesAllocator,
16445  typename Dispatch,
16446  typename B1,
16450  PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
16451  {
16452  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16453 
16454  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
16455  physicalDeviceToolPropertiesAllocator );
16456  uint32_t toolCount;
16457  VkResult result;
16458  do
16459  {
16460  result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
16461  if ( ( result == VK_SUCCESS ) && toolCount )
16462  {
16463  toolProperties.resize( toolCount );
16464  result =
16465  d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
16466  }
16467  } while ( result == VK_INCOMPLETE );
16468  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
16469  VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
16470  if ( toolCount < toolProperties.size() )
16471  {
16472  toolProperties.resize( toolCount );
16473  }
16474  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
16475  }
16476 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16477 
16478  //=== VK_KHR_present_wait ===
16479 
16480 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16481  template <typename Dispatch>
16483  uint64_t presentId,
16484  uint64_t timeout,
16485  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16486  {
16487  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16488  return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
16489  }
16490 #else
16491  template <typename Dispatch>
16493  Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
16494  {
16495  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16496 
16497  VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
16498  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16499  VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
16501 
16502  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
16503  }
16504 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16505 
16506  //=== VK_NV_cooperative_matrix ===
16507 
16508  template <typename Dispatch>
16510  uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16511  {
16512  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16513  return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16514  m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
16515  }
16516 
16517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16518  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
16522  {
16523  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16524 
16525  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
16526  uint32_t propertyCount;
16527  VkResult result;
16528  do
16529  {
16530  result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
16531  if ( ( result == VK_SUCCESS ) && propertyCount )
16532  {
16533  properties.resize( propertyCount );
16534  result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16535  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
16536  }
16537  } while ( result == VK_INCOMPLETE );
16538  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
16539  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
16540  if ( propertyCount < properties.size() )
16541  {
16542  properties.resize( propertyCount );
16543  }
16544  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
16545  }
16546 
16547  template <typename CooperativeMatrixPropertiesNVAllocator,
16548  typename Dispatch,
16549  typename B1,
16553  PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
16554  Dispatch const & d ) const
16555  {
16556  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16557 
16558  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
16559  cooperativeMatrixPropertiesNVAllocator );
16560  uint32_t propertyCount;
16561  VkResult result;
16562  do
16563  {
16564  result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
16565  if ( ( result == VK_SUCCESS ) && propertyCount )
16566  {
16567  properties.resize( propertyCount );
16568  result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16569  m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
16570  }
16571  } while ( result == VK_INCOMPLETE );
16572  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
16573  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
16574  if ( propertyCount < properties.size() )
16575  {
16576  properties.resize( propertyCount );
16577  }
16578  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
16579  }
16580 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16581 
16582  //=== VK_NV_coverage_reduction_mode ===
16583 
16584  template <typename Dispatch>
16586  uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16587  {
16588  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16589  return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16590  m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
16591  }
16592 
16593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16594  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
16598  {
16599  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16600 
16601  std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
16602  uint32_t combinationCount;
16603  VkResult result;
16604  do
16605  {
16606  result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
16607  if ( ( result == VK_SUCCESS ) && combinationCount )
16608  {
16609  combinations.resize( combinationCount );
16610  result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16611  m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
16612  }
16613  } while ( result == VK_INCOMPLETE );
16614  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16615  VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
16616  VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
16617  if ( combinationCount < combinations.size() )
16618  {
16619  combinations.resize( combinationCount );
16620  }
16621  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
16622  }
16623 
16624  template <typename FramebufferMixedSamplesCombinationNVAllocator,
16625  typename Dispatch,
16626  typename B1,
16631  FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
16632  {
16633  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16634 
16635  std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
16636  framebufferMixedSamplesCombinationNVAllocator );
16637  uint32_t combinationCount;
16638  VkResult result;
16639  do
16640  {
16641  result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
16642  if ( ( result == VK_SUCCESS ) && combinationCount )
16643  {
16644  combinations.resize( combinationCount );
16645  result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16646  m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
16647  }
16648  } while ( result == VK_INCOMPLETE );
16649  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16650  VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
16651  VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
16652  if ( combinationCount < combinations.size() )
16653  {
16654  combinations.resize( combinationCount );
16655  }
16656  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
16657  }
16658 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16659 
16660 #if defined( VK_USE_PLATFORM_WIN32_KHR )
16661  //=== VK_EXT_full_screen_exclusive ===
16662 
16663  template <typename Dispatch>
16665  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
16666  uint32_t * pPresentModeCount,
16667  VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
16668  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16669  {
16670  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16671  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16672  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
16673  pPresentModeCount,
16674  reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
16675  }
16676 
16677 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16678  template <typename PresentModeKHRAllocator, typename Dispatch>
16679  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
16680  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
16681  {
16682  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16683 
16684  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
16685  uint32_t presentModeCount;
16686  VkResult result;
16687  do
16688  {
16689  result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
16690  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
16691  if ( ( result == VK_SUCCESS ) && presentModeCount )
16692  {
16693  presentModes.resize( presentModeCount );
16694  result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16695  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
16696  &presentModeCount,
16697  reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
16698  }
16699  } while ( result == VK_INCOMPLETE );
16700  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
16701  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
16702  if ( presentModeCount < presentModes.size() )
16703  {
16704  presentModes.resize( presentModeCount );
16705  }
16706  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
16707  }
16708 
16709  template <typename PresentModeKHRAllocator,
16710  typename Dispatch,
16711  typename B1,
16713  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
16714  PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
16715  PresentModeKHRAllocator & presentModeKHRAllocator,
16716  Dispatch const & d ) const
16717  {
16718  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16719 
16720  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
16721  uint32_t presentModeCount;
16722  VkResult result;
16723  do
16724  {
16725  result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
16726  m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
16727  if ( ( result == VK_SUCCESS ) && presentModeCount )
16728  {
16729  presentModes.resize( presentModeCount );
16730  result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16731  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
16732  &presentModeCount,
16733  reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
16734  }
16735  } while ( result == VK_INCOMPLETE );
16736  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
16737  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
16738  if ( presentModeCount < presentModes.size() )
16739  {
16740  presentModes.resize( presentModeCount );
16741  }
16742  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
16743  }
16744 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16745 
16746 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16747  template <typename Dispatch>
16748  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
16749  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16750  {
16751  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16752  return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
16753  }
16754 # else
16755  template <typename Dispatch>
16757  Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
16758  {
16759  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16760 
16761  VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
16762  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
16763 
16764  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16765  }
16766 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16767 
16768 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16769  template <typename Dispatch>
16770  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
16771  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16772  {
16773  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16774  return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
16775  }
16776 # else
16777  template <typename Dispatch>
16779  Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
16780  {
16781  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16782 
16783  VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
16784  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
16785 
16786  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
16787  }
16788 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16789 
16790  template <typename Dispatch>
16792  Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
16794  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16795  {
16796  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16797  return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
16798  m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
16799  }
16800 
16801 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16802  template <typename Dispatch>
16804  Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
16805  {
16806  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16807 
16809  VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
16810  m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
16811  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
16812 
16813  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
16814  }
16815 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16816 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
16817 
16818  //=== VK_EXT_headless_surface ===
16819 
16820  template <typename Dispatch>
16822  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
16824  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16825  {
16826  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16827  return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
16828  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
16829  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16830  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
16831  }
16832 
16833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16834  template <typename Dispatch>
16838  Dispatch const & d ) const
16839  {
16840  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16841 
16843  VkResult result = d.vkCreateHeadlessSurfaceEXT(
16844  m_instance,
16845  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
16846  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16847  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16848  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
16849 
16850  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
16851  }
16852 
16853 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16854  template <typename Dispatch>
16858  Dispatch const & d ) const
16859  {
16860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16861 
16863  VkResult result = d.vkCreateHeadlessSurfaceEXT(
16864  m_instance,
16865  reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
16866  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16867  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16868  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
16869 
16870  return createResultValueType(
16871  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16873  }
16874 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
16875 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16876 
16877  //=== VK_KHR_buffer_device_address ===
16878 
16879  template <typename Dispatch>
16881  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16882  {
16883  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16884  return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
16885  }
16886 
16887 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16888  template <typename Dispatch>
16890  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16891  {
16892  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16893 
16894  VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
16895 
16896  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
16897  }
16898 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16899 
16900  template <typename Dispatch>
16902  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16903  {
16904  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16905  return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
16906  }
16907 
16908 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16909  template <typename Dispatch>
16911  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16912  {
16913  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16914 
16915  uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
16916 
16917  return result;
16918  }
16919 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16920 
16921  template <typename Dispatch>
16923  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16924  {
16925  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16926  return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
16927  }
16928 
16929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16930  template <typename Dispatch>
16932  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16933  {
16934  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16935 
16936  uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
16937 
16938  return result;
16939  }
16940 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
16941 
16942  //=== VK_EXT_line_rasterization ===
16943 
16944  template <typename Dispatch>
16945  VULKAN_HPP_INLINE void
16946  CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16947  {
16948  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16949  d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
16950  }
16951 
16952  //=== VK_EXT_host_query_reset ===
16953 
16954  template <typename Dispatch>
16956  uint32_t firstQuery,
16957  uint32_t queryCount,
16958  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16959  {
16960  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16961  d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
16962  }
16963 
16964  //=== VK_EXT_extended_dynamic_state ===
16965 
16966  template <typename Dispatch>
16968  {
16969  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16970  d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
16971  }
16972 
16973  template <typename Dispatch>
16975  {
16976  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16977  d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
16978  }
16979 
16980  template <typename Dispatch>
16982  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16983  {
16984  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16985  d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
16986  }
16987 
16988  template <typename Dispatch>
16990  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
16991  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
16992  {
16993  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
16994  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
16995  }
16996 
16997 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16998  template <typename Dispatch>
17000  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17001  {
17002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17003 
17004  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
17005  }
17006 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17007 
17008  template <typename Dispatch>
17009  VULKAN_HPP_INLINE void
17010  CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17011  {
17012  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17013  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
17014  }
17015 
17016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17017  template <typename Dispatch>
17019  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17020  {
17021  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17022 
17023  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
17024  }
17025 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17026 
17027  template <typename Dispatch>
17029  uint32_t bindingCount,
17030  const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
17031  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
17032  const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
17033  const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
17034  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17035  {
17036  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17037  d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
17038  firstBinding,
17039  bindingCount,
17040  reinterpret_cast<const VkBuffer *>( pBuffers ),
17041  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
17042  reinterpret_cast<const VkDeviceSize *>( pSizes ),
17043  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
17044  }
17045 
17046 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17047  template <typename Dispatch>
17053  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
17054  {
17055  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17056 # ifdef VULKAN_HPP_NO_EXCEPTIONS
17057  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
17058  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
17059  VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
17060 # else
17061  if ( buffers.size() != offsets.size() )
17062  {
17063  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
17064  }
17065  if ( !sizes.empty() && buffers.size() != sizes.size() )
17066  {
17067  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
17068  }
17069  if ( !strides.empty() && buffers.size() != strides.size() )
17070  {
17071  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
17072  }
17073 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
17074 
17075  d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
17076  firstBinding,
17077  buffers.size(),
17078  reinterpret_cast<const VkBuffer *>( buffers.data() ),
17079  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
17080  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
17081  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
17082  }
17083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17084 
17085  template <typename Dispatch>
17087  {
17088  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17089  d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
17090  }
17091 
17092  template <typename Dispatch>
17094  {
17095  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17096  d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
17097  }
17098 
17099  template <typename Dispatch>
17101  {
17102  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17103  d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
17104  }
17105 
17106  template <typename Dispatch>
17108  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17109  {
17110  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17111  d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
17112  }
17113 
17114  template <typename Dispatch>
17116  {
17117  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17118  d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
17119  }
17120 
17121  template <typename Dispatch>
17125  VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
17127  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17128  {
17129  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17130  d.vkCmdSetStencilOpEXT( m_commandBuffer,
17131  static_cast<VkStencilFaceFlags>( faceMask ),
17132  static_cast<VkStencilOp>( failOp ),
17133  static_cast<VkStencilOp>( passOp ),
17134  static_cast<VkStencilOp>( depthFailOp ),
17135  static_cast<VkCompareOp>( compareOp ) );
17136  }
17137 
17138  //=== VK_KHR_deferred_host_operations ===
17139 
17140  template <typename Dispatch>
17142  VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
17143  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17144  {
17145  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17146  return static_cast<Result>( d.vkCreateDeferredOperationKHR(
17147  m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
17148  }
17149 
17150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17151  template <typename Dispatch>
17154  {
17155  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17156 
17158  VkResult result = d.vkCreateDeferredOperationKHR(
17159  m_device,
17160  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17161  reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
17162  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
17163 
17164  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
17165  }
17166 
17167 # ifndef VULKAN_HPP_NO_SMART_HANDLE
17168  template <typename Dispatch>
17171  {
17172  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17173 
17175  VkResult result = d.vkCreateDeferredOperationKHR(
17176  m_device,
17177  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17178  reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
17179  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
17180 
17181  return createResultValueType(
17182  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17184  }
17185 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17186 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17187 
17188  template <typename Dispatch>
17190  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17191  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17192  {
17193  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17194  d.vkDestroyDeferredOperationKHR(
17195  m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17196  }
17197 
17198 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17199  template <typename Dispatch>
17202  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17203  {
17204  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17205 
17206  d.vkDestroyDeferredOperationKHR(
17207  m_device,
17208  static_cast<VkDeferredOperationKHR>( operation ),
17209  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17210  }
17211 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17212 
17213  template <typename Dispatch>
17215  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17216  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17217  {
17218  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17219  d.vkDestroyDeferredOperationKHR(
17220  m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17221  }
17222 
17223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17224  template <typename Dispatch>
17227  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17228  {
17229  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17230 
17231  d.vkDestroyDeferredOperationKHR(
17232  m_device,
17233  static_cast<VkDeferredOperationKHR>( operation ),
17234  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17235  }
17236 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17237 
17238  template <typename Dispatch>
17240  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17241  {
17242  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17243  return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
17244  }
17245 
17246 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17247  template <typename Dispatch>
17249  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17250  {
17251  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17252  return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
17253  }
17254 #else
17255  template <typename Dispatch>
17258  {
17259  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17260 
17261  VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
17262 
17263  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
17264  }
17265 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17266 
17267 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17268  template <typename Dispatch>
17270  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17271  {
17272  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17273  return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
17274  }
17275 #else
17276  template <typename Dispatch>
17278  Dispatch const & d ) const
17279  {
17280  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17281 
17282  VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
17283  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17284  VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
17286 
17287  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
17288  }
17289 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17290 
17291  //=== VK_KHR_pipeline_executable_properties ===
17292 
17293  template <typename Dispatch>
17295  uint32_t * pExecutableCount,
17297  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17298  {
17299  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17300  return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
17301  reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
17302  pExecutableCount,
17303  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
17304  }
17305 
17306 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17307  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
17311  {
17312  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17313 
17314  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
17315  uint32_t executableCount;
17316  VkResult result;
17317  do
17318  {
17319  result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
17320  if ( ( result == VK_SUCCESS ) && executableCount )
17321  {
17322  properties.resize( executableCount );
17323  result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
17324  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
17325  &executableCount,
17326  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
17327  }
17328  } while ( result == VK_INCOMPLETE );
17329  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
17330  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
17331  if ( executableCount < properties.size() )
17332  {
17333  properties.resize( executableCount );
17334  }
17335  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
17336  }
17337 
17338  template <typename PipelineExecutablePropertiesKHRAllocator,
17339  typename Dispatch,
17340  typename B1,
17345  PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
17346  Dispatch const & d ) const
17347  {
17348  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17349 
17350  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
17351  pipelineExecutablePropertiesKHRAllocator );
17352  uint32_t executableCount;
17353  VkResult result;
17354  do
17355  {
17356  result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
17357  if ( ( result == VK_SUCCESS ) && executableCount )
17358  {
17359  properties.resize( executableCount );
17360  result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
17361  reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
17362  &executableCount,
17363  reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
17364  }
17365  } while ( result == VK_INCOMPLETE );
17366  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
17367  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
17368  if ( executableCount < properties.size() )
17369  {
17370  properties.resize( executableCount );
17371  }
17372  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
17373  }
17374 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17375 
17376  template <typename Dispatch>
17379  uint32_t * pStatisticCount,
17381  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17382  {
17383  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17384  return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
17385  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
17386  pStatisticCount,
17387  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
17388  }
17389 
17390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17391  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
17395  {
17396  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17397 
17398  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
17399  uint32_t statisticCount;
17400  VkResult result;
17401  do
17402  {
17403  result =
17404  d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
17405  if ( ( result == VK_SUCCESS ) && statisticCount )
17406  {
17407  statistics.resize( statisticCount );
17408  result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
17409  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17410  &statisticCount,
17411  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
17412  }
17413  } while ( result == VK_INCOMPLETE );
17414  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
17415  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
17416  if ( statisticCount < statistics.size() )
17417  {
17418  statistics.resize( statisticCount );
17419  }
17420  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
17421  }
17422 
17423  template <typename PipelineExecutableStatisticKHRAllocator,
17424  typename Dispatch,
17425  typename B1,
17430  PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
17431  Dispatch const & d ) const
17432  {
17433  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17434 
17435  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
17436  pipelineExecutableStatisticKHRAllocator );
17437  uint32_t statisticCount;
17438  VkResult result;
17439  do
17440  {
17441  result =
17442  d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
17443  if ( ( result == VK_SUCCESS ) && statisticCount )
17444  {
17445  statistics.resize( statisticCount );
17446  result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
17447  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17448  &statisticCount,
17449  reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
17450  }
17451  } while ( result == VK_INCOMPLETE );
17452  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
17453  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
17454  if ( statisticCount < statistics.size() )
17455  {
17456  statistics.resize( statisticCount );
17457  }
17458  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
17459  }
17460 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17461 
 // NOTE(review): several declaration lines (function names / return types) are missing from this
 // excerpt — a doxygen-extraction artifact; the code below is kept byte-identical to the source.
 // --- VK_KHR_pipeline_executable_properties ---
 // C-API style overload: caller supplies count/output pointers; returns the raw Result unchanged.
17462  template <typename Dispatch>
17465  uint32_t * pInternalRepresentationCount,
17467  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17468  {
 // Guard against a dispatcher built against a different Vulkan header than this one.
17469  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17470  return static_cast<Result>(
17471  d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
17472  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
17473  pInternalRepresentationCount,
17474  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
17475  }
17476 
17477 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: standard two-call enumeration — query the count with a null pointer,
 // resize the vector, then fetch the data; the loop retries while the driver reports
 // VK_INCOMPLETE (the element count can change between the two calls).
17478  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
17480  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
17482  {
17483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17484 
17485  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
17486  internalRepresentations;
17487  uint32_t internalRepresentationCount;
17488  VkResult result;
17489  do
17490  {
 // First call: count only (data pointer is null).
17491  result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17492  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
17493  if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
17494  {
 // Second call: fill the freshly-sized vector.
17495  internalRepresentations.resize( internalRepresentationCount );
17496  result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17497  m_device,
17498  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17499  &internalRepresentationCount,
17500  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
17501  }
17502  } while ( result == VK_INCOMPLETE );
 // Throws (or asserts, in no-exception builds) on any error Result.
17503  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
17504  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
 // Shrink if the final count came back smaller than the reserved size.
17505  if ( internalRepresentationCount < internalRepresentations.size() )
17506  {
17507  internalRepresentations.resize( internalRepresentationCount );
17508  }
17509  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
17510  }
17511 
 // Same enumeration, but the caller supplies the vector's allocator instance
 // (B1 is SFINAE-constrained to be an allocator of the element type — constraint
 // line lost in extraction).
17512  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
17513  typename Dispatch,
17514  typename B1,
17517  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
17519  const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
17520  PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
17521  Dispatch const & d ) const
17522  {
17523  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17524 
17525  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
17526  internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
17527  uint32_t internalRepresentationCount;
17528  VkResult result;
17529  do
17530  {
17531  result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17532  m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
17533  if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
17534  {
17535  internalRepresentations.resize( internalRepresentationCount );
17536  result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17537  m_device,
17538  reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17539  &internalRepresentationCount,
17540  reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
17541  }
17542  } while ( result == VK_INCOMPLETE );
17543  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
17544  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
17545  if ( internalRepresentationCount < internalRepresentations.size() )
17546  {
17547  internalRepresentations.resize( internalRepresentationCount );
17548  }
17549  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
17550  }
17551 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17552 
17553  //=== VK_NV_device_generated_commands ===
 // NOTE(review): declaration lines lost in extraction throughout this section; code kept byte-identical.
17554 
 // Pointer overload: fills caller-provided MemoryRequirements2; void return.
17555  template <typename Dispatch>
17557  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
17558  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17559  {
17560  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17561  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17562  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
17563  reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
17564  }
17565 
17566 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: returns MemoryRequirements2 by value.
17567  template <typename Dispatch>
17570  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17571  {
17572  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17573 
17574  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
17575  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17576  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
17577  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
17578 
17579  return memoryRequirements;
17580  }
17581 
 // StructureChain overload: lets the caller attach extension structures to the
 // MemoryRequirements2 query via the chain's pNext linkage.
17582  template <typename X, typename Y, typename... Z, typename Dispatch>
17585  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17586  {
17587  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17588 
17589  StructureChain<X, Y, Z...> structureChain;
17590  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
17591  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17592  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
17593  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
17594 
17595  return structureChain;
17596  }
17597 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17598 
 // Records vkCmdPreprocessGeneratedCommandsNV into this command buffer (pointer overload).
17599  template <typename Dispatch>
17601  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17602  {
17603  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17604  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
17605  }
17606 
17607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Reference overload of the same command.
17608  template <typename Dispatch>
17610  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17611  {
17612  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17613 
17614  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
17615  }
17616 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17617 
 // Records vkCmdExecuteGeneratedCommandsNV; isPreprocessed is forwarded as VkBool32.
17618  template <typename Dispatch>
17620  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
17621  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17622  {
17623  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17624  d.vkCmdExecuteGeneratedCommandsNV(
17625  m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
17626  }
17627 
17628 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17629  template <typename Dispatch>
17631  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
17632  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17633  {
17634  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17635 
17636  d.vkCmdExecuteGeneratedCommandsNV(
17637  m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
17638  }
17639 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17640 
 // Binds one shader group of a graphics pipeline created for device-generated commands.
17641  template <typename Dispatch>
17644  uint32_t groupIndex,
17645  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17646  {
17647  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17648  d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
17649  }
17650 
 // Pointer overload: creates an indirect-commands layout; raw Result return.
17651  template <typename Dispatch>
17654  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17655  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
17656  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17657  {
17658  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17659  return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
17660  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
17661  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17662  reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
17663  }
17664 
17665 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: returns the created handle; resultCheck throws on failure.
 // The allocator parameter is an Optional<const AllocationCallbacks>, hence the
 // static_cast before the reinterpret_cast to VkAllocationCallbacks.
17666  template <typename Dispatch>
17670  Dispatch const & d ) const
17671  {
17672  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17673 
17674  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
17675  VkResult result = d.vkCreateIndirectCommandsLayoutNV(
17676  m_device,
17677  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
17678  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17679  reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
17680  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
17681 
17682  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
17683  }
17684 
17685 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Unique-handle variant: wraps the handle so ObjectDestroy frees it on scope exit.
17686  template <typename Dispatch>
17690  Dispatch const & d ) const
17691  {
17692  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17693 
17694  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
17695  VkResult result = d.vkCreateIndirectCommandsLayoutNV(
17696  m_device,
17697  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
17698  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17699  reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
17700  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
17701 
17702  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17704  indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
17705  }
17706 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17707 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17708 
 // Pointer overload of destroyIndirectCommandsLayoutNV.
17709  template <typename Dispatch>
17711  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17712  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17713  {
17714  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17715  d.vkDestroyIndirectCommandsLayoutNV(
17716  m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17717  }
17718 
17719 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17720  template <typename Dispatch>
17723  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17724  {
17725  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17726 
17727  d.vkDestroyIndirectCommandsLayoutNV(
17728  m_device,
17729  static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
17730  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17731  }
17732 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17733 
 // Generic Device::destroy(...) overloads dispatching to the same C entry point
 // (declaration lines lost in extraction — presumably the destroy(IndirectCommandsLayoutNV)
 // pair; TODO confirm against the full generated header).
17734  template <typename Dispatch>
17736  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17737  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17738  {
17739  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17740  d.vkDestroyIndirectCommandsLayoutNV(
17741  m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17742  }
17743 
17744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17745  template <typename Dispatch>
17748  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17749  {
17750  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17751 
17752  d.vkDestroyIndirectCommandsLayoutNV(
17753  m_device,
17754  static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
17755  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17756  }
17757 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17758 
17759  //=== VK_EXT_acquire_drm_display ===
 // NOTE(review): declaration lines lost in extraction; code kept byte-identical.
17760 
 // Note the inverted guard: the raw-Result overload exists only when enhanced mode is DISABLED.
17761 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17762  template <typename Dispatch>
17765  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17766  {
17767  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17768  return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
17769  }
17770 #else
 // Enhanced form: acquires the display for the given DRM fd; throws via resultCheck on failure.
17771  template <typename Dispatch>
17773  PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
17774  {
17775  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17776 
17777  VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
17778  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
17779 
17780  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
17781  }
17782 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17783 
 // Pointer overload: writes the DisplayKHR for (drmFd, connectorId) into *display.
17784  template <typename Dispatch>
17786  uint32_t connectorId,
17788  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17789  {
17790  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17791  return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
17792  }
17793 
17794 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced form: returns the DisplayKHR handle (local declaration line lost in extraction).
17795  template <typename Dispatch>
17797  PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
17798  {
17799  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17800 
17802  VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
17803  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
17804 
17805  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
17806  }
17807 
17808 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Unique-handle variant (return-expression tail lost in extraction).
17809  template <typename Dispatch>
17811  PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
17812  {
17813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17814 
17816  VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
17817  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
17818 
17819  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17821  }
17822 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17824 
17825  //=== VK_EXT_private_data ===
 // NOTE(review): declaration lines lost in extraction; code kept byte-identical.
17826 
 // Pointer overload: creates a private-data slot; raw Result return.
17827  template <typename Dispatch>
17829  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17830  VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
17831  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17832  {
17833  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17834  return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
17835  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
17836  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17837  reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
17838  }
17839 
17840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: returns the created PrivateDataSlot; throws via resultCheck on error.
17841  template <typename Dispatch>
17845  Dispatch const & d ) const
17846  {
17847  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17848 
17849  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
17850  VkResult result = d.vkCreatePrivateDataSlotEXT(
17851  m_device,
17852  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
17853  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17854  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
17855  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
17856 
17857  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
17858  }
17859 
17860 # ifndef VULKAN_HPP_NO_SMART_HANDLE
 // Unique-handle variant (return-expression tail lost in extraction).
17861  template <typename Dispatch>
17865  Dispatch const & d ) const
17866  {
17867  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17868 
17869  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
17870  VkResult result = d.vkCreatePrivateDataSlotEXT(
17871  m_device,
17872  reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
17873  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17874  reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
17875  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
17876 
17877  return createResultValueType(
17878  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17880  }
17881 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
17882 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17883 
 // Pointer overload of destroyPrivateDataSlotEXT.
17884  template <typename Dispatch>
17886  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
17887  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17888  {
17889  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17890  d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17891  }
17892 
17893 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17894  template <typename Dispatch>
17897  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17898  {
17899  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17900 
17901  d.vkDestroyPrivateDataSlotEXT(
17902  m_device,
17903  static_cast<VkPrivateDataSlot>( privateDataSlot ),
17904  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17905  }
17906 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17907 
 // setPrivateDataEXT: attaches a 64-bit value to (objectType, objectHandle) in the given slot.
 // Inverted guard again: raw-Result form only when enhanced mode is disabled.
17908 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17909  template <typename Dispatch>
17911  uint64_t objectHandle,
17912  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
17913  uint64_t data,
17914  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17915  {
17916  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17917  return static_cast<Result>(
17918  d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
17919  }
17920 #else
17921  template <typename Dispatch>
17923  uint64_t objectHandle,
17924  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
17925  uint64_t data,
17926  Dispatch const & d ) const
17927  {
17928  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17929 
17930  VkResult result =
17931  d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
17932  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
17933 
17934  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
17935  }
17936 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17937 
 // getPrivateDataEXT: pointer overload writes the stored value into *pData; void return.
17938  template <typename Dispatch>
17940  uint64_t objectHandle,
17941  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
17942  uint64_t * pData,
17943  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17944  {
17945  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17946  d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
17947  }
17948 
17949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: returns the stored uint64_t by value.
17950  template <typename Dispatch>
17952  uint64_t objectHandle,
17953  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
17954  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17955  {
17956  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17957 
17958  uint64_t data;
17959  d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
17960 
17961  return data;
17962  }
17963 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17964 
 // Beta extension: the whole section is compiled only with VK_ENABLE_BETA_EXTENSIONS.
17965 #if defined( VK_ENABLE_BETA_EXTENSIONS )
17966  //=== VK_KHR_video_encode_queue ===
17967 
 // Pointer overload: records vkCmdEncodeVideoKHR into this command buffer.
17968  template <typename Dispatch>
17969  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
17970  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17971  {
17972  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17973  d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
17974  }
17975 
17976 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Reference overload of the same command.
17977  template <typename Dispatch>
17978  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
17979  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17980  {
17981  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17982 
17983  d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
17984  }
17985 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
17986 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
17987 
 // Metal-interop section: compiled only on platforms with VK_USE_PLATFORM_METAL_EXT.
17988 #if defined( VK_USE_PLATFORM_METAL_EXT )
17989  //=== VK_EXT_metal_objects ===
17990 
 // Pointer overload: fills caller-provided ExportMetalObjectsInfoEXT (note: non-const out-param).
17991  template <typename Dispatch>
17992  VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
17993  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
17994  {
17995  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
17996  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
17997  }
17998 
17999 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: returns the filled struct by value.
18000  template <typename Dispatch>
18001  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
18002  Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18003  {
18004  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18005 
18006  VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
18007  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
18008 
18009  return metalObjectsInfo;
18010  }
18011 
 // StructureChain overload: caller selects which export structures to chain via pNext.
18012  template <typename X, typename Y, typename... Z, typename Dispatch>
18013  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18014  {
18015  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18016 
18017  StructureChain<X, Y, Z...> structureChain;
18018  VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
18019  d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
18020 
18021  return structureChain;
18022  }
18023 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18024 #endif /*VK_USE_PLATFORM_METAL_EXT*/
18025 
18026  //=== VK_KHR_synchronization2 ===
 // NOTE(review): declaration lines lost in extraction throughout this section; code kept byte-identical.
18027 
 // setEvent2KHR: pointer overload.
18028  template <typename Dispatch>
18030  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
18031  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18032  {
18033  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18034  d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
18035  }
18036 
18037 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18038  template <typename Dispatch>
18040  const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
18041  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18042  {
18043  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18044 
18045  d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
18046  }
18047 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18048 
 // resetEvent2KHR: plain value parameters, single overload.
18049  template <typename Dispatch>
18052  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18053  {
18054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18055  d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
18056  }
18057 
 // waitEvents2KHR: pointer overload — parallel arrays of events and dependency infos.
18058  template <typename Dispatch>
18060  const VULKAN_HPP_NAMESPACE::Event * pEvents,
18061  const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
18062  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18063  {
18064  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18065  d.vkCmdWaitEvents2KHR(
18066  m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
18067  }
18068 
18069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // ArrayProxy overload: validates that both arrays are the same length —
 // assert in no-exception builds, LogicError otherwise.
18070  template <typename Dispatch>
18073  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
18074  {
18075  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18076 # ifdef VULKAN_HPP_NO_EXCEPTIONS
18077  VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
18078 # else
18079  if ( events.size() != dependencyInfos.size() )
18080  {
18081  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
18082  }
18083 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
18084 
18085  d.vkCmdWaitEvents2KHR( m_commandBuffer,
18086  events.size(),
18087  reinterpret_cast<const VkEvent *>( events.data() ),
18088  reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
18089  }
18090 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18091 
 // pipelineBarrier2KHR: pointer overload.
18092  template <typename Dispatch>
18094  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18095  {
18096  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18097  d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
18098  }
18099 
18100 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18101  template <typename Dispatch>
18103  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18104  {
18105  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18106 
18107  d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
18108  }
18109 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18110 
 // writeTimestamp2KHR: records a timestamp at the given stage into queryPool[query].
18111  template <typename Dispatch>
18114  uint32_t query,
18115  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18116  {
18117  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18118  d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
18119  }
18120 
 // submit2KHR: pointer overload; raw Result return.
18121  template <typename Dispatch>
18123  const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
18125  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18126  {
18127  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18128  return static_cast<Result>(
18129  d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
18130  }
18131 
18132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // ArrayProxy overload: submits a batch and throws via resultCheck on failure.
18133  template <typename Dispatch>
18136  {
18137  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18138 
18139  VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
18140  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
18141 
18142  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
18143  }
18144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18145 
 // writeBufferMarker2AMD: writes a 32-bit marker into dstBuffer at dstOffset after the given stage.
18146  template <typename Dispatch>
18148  VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
18150  uint32_t marker,
18151  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18152  {
18153  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18154  d.vkCmdWriteBufferMarker2AMD(
18155  m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
18156  }
18157 
 // getCheckpointData2NV: pointer overload (count/data protocol, single pass here).
18158  template <typename Dispatch>
18159  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
18160  VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
18161  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18162  {
18163  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18164  d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
18165  }
18166 
18167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
 // Enhanced overload: two-call enumeration without a retry loop (the C function
 // returns void, so there is no VK_INCOMPLETE to re-check here).
18168  template <typename CheckpointData2NVAllocator, typename Dispatch>
18169  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
18170  Queue::getCheckpointData2NV( Dispatch const & d ) const
18171  {
18172  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18173 
18174  std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
18175  uint32_t checkpointDataCount;
18176  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
18177  checkpointData.resize( checkpointDataCount );
18178  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
18179 
18180  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
18181  if ( checkpointDataCount < checkpointData.size() )
18182  {
18183  checkpointData.resize( checkpointDataCount );
18184  }
18185  return checkpointData;
18186  }
18187 
 // Allocator-supplying variant of the enumeration above.
18188  template <typename CheckpointData2NVAllocator,
18189  typename Dispatch,
18190  typename B1,
18192  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
18193  Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
18194  {
18195  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18196 
18197  std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
18198  uint32_t checkpointDataCount;
18199  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
18200  checkpointData.resize( checkpointDataCount );
18201  d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
18202 
18203  VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
18204  if ( checkpointDataCount < checkpointData.size() )
18205  {
18206  checkpointData.resize( checkpointDataCount );
18207  }
18208  return checkpointData;
18209  }
18210 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18211 
18212  //=== VK_EXT_descriptor_buffer ===
18213 
18214  template <typename Dispatch>
18216  VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,
18217  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18218  {
18219  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18220  d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
18221  }
18222 
18223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18224  template <typename Dispatch>
18227  {
18228  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18229 
18230  VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
18231  d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
18232 
18233  return layoutSizeInBytes;
18234  }
18235 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18236 
18237  template <typename Dispatch>
18239  uint32_t binding,
18241  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18242  {
18243  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18244  d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
18245  }
18246 
18247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18248  template <typename Dispatch>
18250  VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18251  {
18252  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18253 
18255  d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
18256 
18257  return offset;
18258  }
18259 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18260 
18261  template <typename Dispatch>
18263  size_t dataSize,
18264  void * pDescriptor,
18265  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18266  {
18267  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18268  d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
18269  }
18270 
18271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18272  template <typename DescriptorType, typename Dispatch>
18274  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18275  {
18276  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18277 
18278  DescriptorType descriptor;
18279  d.vkGetDescriptorEXT(
18280  m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
18281 
18282  return descriptor;
18283  }
18284 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18285 
18286  template <typename Dispatch>
18289  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18290  {
18291  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18292  d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
18293  }
18294 
18295 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18296  template <typename Dispatch>
18297  VULKAN_HPP_INLINE void
18299  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18300  {
18301  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18302 
18303  d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
18304  }
18305 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18306 
18307  template <typename Dispatch>
18310  uint32_t firstSet,
18311  uint32_t setCount,
18312  const uint32_t * pBufferIndices,
18313  const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
18314  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18315  {
18316  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18317  d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
18318  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
18319  static_cast<VkPipelineLayout>( layout ),
18320  firstSet,
18321  setCount,
18322  pBufferIndices,
18323  reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
18324  }
18325 
18326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18327  template <typename Dispatch>
18330  uint32_t firstSet,
18333  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
18334  {
18335  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18336 # ifdef VULKAN_HPP_NO_EXCEPTIONS
18337  VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() );
18338 # else
18339  if ( bufferIndices.size() != offsets.size() )
18340  {
18341  throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
18342  }
18343 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
18344 
18345  d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
18346  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
18347  static_cast<VkPipelineLayout>( layout ),
18348  firstSet,
18349  bufferIndices.size(),
18350  bufferIndices.data(),
18351  reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
18352  }
18353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18354 
18355  template <typename Dispatch>
18358  uint32_t set,
18359  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18360  {
18361  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18362  d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
18363  m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
18364  }
18365 
18366  template <typename Dispatch>
18368  const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18369  {
18370  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18371  return static_cast<Result>(
18372  d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18373  }
18374 
18375 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18376  template <typename DataType, typename Dispatch>
18379  {
18380  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18381 
18382  DataType data;
18383  VkResult result = d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
18384  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
18385 
18386  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18387  }
18388 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18389 
18390  template <typename Dispatch>
18392  const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18393  {
18394  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18395  return static_cast<Result>(
18396  d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18397  }
18398 
18399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18400  template <typename DataType, typename Dispatch>
18403  {
18404  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18405 
18406  DataType data;
18407  VkResult result = d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
18408  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
18409 
18410  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18411  }
18412 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18413 
18414  template <typename Dispatch>
18416  const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18417  {
18418  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18419  return static_cast<Result>(
18420  d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18421  }
18422 
18423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18424  template <typename DataType, typename Dispatch>
18427  {
18428  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18429 
18430  DataType data;
18431  VkResult result =
18432  d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
18433  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
18434 
18435  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18436  }
18437 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18438 
18439  template <typename Dispatch>
18441  const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18442  {
18443  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18444  return static_cast<Result>(
18445  d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18446  }
18447 
18448 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18449  template <typename DataType, typename Dispatch>
18452  {
18453  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18454 
18455  DataType data;
18456  VkResult result = d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
18457  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
18458 
18459  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18460  }
18461 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18462 
18463  template <typename Dispatch>
18465  const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18466  {
18467  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18468  return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
18469  m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18470  }
18471 
18472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18473  template <typename DataType, typename Dispatch>
18476  Dispatch const & d ) const
18477  {
18478  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18479 
18480  DataType data;
18481  VkResult result = d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
18482  m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
18483  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18484  VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
18485 
18486  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18487  }
18488 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18489 
18490  //=== VK_NV_fragment_shading_rate_enums ===
18491 
18492  template <typename Dispatch>
18495  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18496  {
18497  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18498  d.vkCmdSetFragmentShadingRateEnumNV(
18499  m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
18500  }
18501 
18502  //=== VK_EXT_mesh_shader ===
18503 
18504  template <typename Dispatch>
18505  VULKAN_HPP_INLINE void
18506  CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18507  {
18508  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18509  d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
18510  }
18511 
18512  template <typename Dispatch>
18515  uint32_t drawCount,
18516  uint32_t stride,
18517  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18518  {
18519  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18520  d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
18521  }
18522 
18523  template <typename Dispatch>
18526  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
18527  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
18528  uint32_t maxDrawCount,
18529  uint32_t stride,
18530  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18531  {
18532  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18533  d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
18534  static_cast<VkBuffer>( buffer ),
18535  static_cast<VkDeviceSize>( offset ),
18536  static_cast<VkBuffer>( countBuffer ),
18537  static_cast<VkDeviceSize>( countBufferOffset ),
18538  maxDrawCount,
18539  stride );
18540  }
18541 
18542  //=== VK_KHR_copy_commands2 ===
18543 
18544  template <typename Dispatch>
18546  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18547  {
18548  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18549  d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
18550  }
18551 
18552 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18553  template <typename Dispatch>
18555  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18556  {
18557  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18558 
18559  d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
18560  }
18561 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18562 
18563  template <typename Dispatch>
18565  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18566  {
18567  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18568  d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
18569  }
18570 
18571 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18572  template <typename Dispatch>
18574  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18575  {
18576  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18577 
18578  d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
18579  }
18580 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18581 
18582  template <typename Dispatch>
18584  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18585  {
18586  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18587  d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
18588  }
18589 
18590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18591  template <typename Dispatch>
18593  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18594  {
18595  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18596 
18597  d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
18598  }
18599 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18600 
18601  template <typename Dispatch>
18603  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18604  {
18605  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18606  d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
18607  }
18608 
18609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18610  template <typename Dispatch>
18612  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18613  {
18614  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18615 
18616  d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
18617  }
18618 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18619 
18620  template <typename Dispatch>
18622  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18623  {
18624  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18625  d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
18626  }
18627 
18628 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18629  template <typename Dispatch>
18631  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18632  {
18633  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18634 
18635  d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
18636  }
18637 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18638 
18639  template <typename Dispatch>
18641  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18642  {
18643  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18644  d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
18645  }
18646 
18647 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18648  template <typename Dispatch>
18650  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18651  {
18652  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18653 
18654  d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
18655  }
18656 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18657 
18658  //=== VK_EXT_image_compression_control ===
18659 
18660  template <typename Dispatch>
18662  const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
18664  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18665  {
18666  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18667  d.vkGetImageSubresourceLayout2EXT( m_device,
18668  static_cast<VkImage>( image ),
18669  reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
18670  reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
18671  }
18672 
18673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18674  template <typename Dispatch>
18677  {
18678  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18679 
18681  d.vkGetImageSubresourceLayout2EXT( m_device,
18682  static_cast<VkImage>( image ),
18683  reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
18684  reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
18685 
18686  return layout;
18687  }
18688 
18689  template <typename X, typename Y, typename... Z, typename Dispatch>
18692  {
18693  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18694 
18695  StructureChain<X, Y, Z...> structureChain;
18696  VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
18697  d.vkGetImageSubresourceLayout2EXT( m_device,
18698  static_cast<VkImage>( image ),
18699  reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
18700  reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
18701 
18702  return structureChain;
18703  }
18704 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18705 
18706  //=== VK_EXT_device_fault ===
18707 
18708  template <typename Dispatch>
18711  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18712  {
18713  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18714  return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
18715  m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
18716  }
18717 
18718 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18719  template <typename Dispatch>
18721  Device::getFaultInfoEXT( Dispatch const & d ) const
18722  {
18723  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18724 
18725  std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
18726  VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
18727  VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
18728  VkResult result =
18729  d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
18730  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18731  VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
18733 
18735  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
18736  }
18737 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18738 
18739 #if defined( VK_USE_PLATFORM_WIN32_KHR )
18740  //=== VK_NV_acquire_winrt_display ===
18741 
18742 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18743  template <typename Dispatch>
18744  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
18745  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18746  {
18747  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18748  return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
18749  }
18750 # else
18751  template <typename Dispatch>
18753  PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
18754  {
18755  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18756 
18757  VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
18758  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
18759 
18760  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
18761  }
18762 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18763 
18764  template <typename Dispatch>
18765  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
18767  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18768  {
18769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18770  return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
18771  }
18772 
18773 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18774  template <typename Dispatch>
18776  PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
18777  {
18778  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18779 
18781  VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
18782  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
18783 
18784  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
18785  }
18786 
18787 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18788  template <typename Dispatch>
18789  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
18790  PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
18791  {
18792  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18793 
18795  VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
18796  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
18797 
18798  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18799  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
18800  }
18801 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
18802 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18803 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
18804 
18805 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
18806  //=== VK_EXT_directfb_surface ===
18807 
18808  template <typename Dispatch>
18809  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
18810  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
18812  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18813  {
18814  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18815  return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
18816  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
18817  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18818  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
18819  }
18820 
18821 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18822  template <typename Dispatch>
18824  Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
18825  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18826  Dispatch const & d ) const
18827  {
18828  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18829 
18831  VkResult result = d.vkCreateDirectFBSurfaceEXT(
18832  m_instance,
18833  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
18834  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18835  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
18836  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
18837 
18838  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
18839  }
18840 
18841 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18842  template <typename Dispatch>
18843  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
18844  Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
18845  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18846  Dispatch const & d ) const
18847  {
18848  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18849 
18851  VkResult result = d.vkCreateDirectFBSurfaceEXT(
18852  m_instance,
18853  reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
18854  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18855  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
18856  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
18857 
18858  return createResultValueType(
18859  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18860  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
18861  }
18862 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
18863 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18864 
18865  template <typename Dispatch>
18866  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
18867  IDirectFB * dfb,
18868  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18869  {
18870  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18871  return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
18872  }
18873 
18874 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18875  template <typename Dispatch>
18877  PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18878  {
18879  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18880 
18881  VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
18882 
18883  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
18884  }
18885 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18886 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
18887 
18888  //=== VK_KHR_ray_tracing_pipeline ===
18889 
18890  template <typename Dispatch>
18892  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
18893  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
18894  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
18895  uint32_t width,
18896  uint32_t height,
18897  uint32_t depth,
18898  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18899  {
18900  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18901  d.vkCmdTraceRaysKHR( m_commandBuffer,
18902  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
18903  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
18904  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
18905  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
18906  width,
18907  height,
18908  depth );
18909  }
18910 
18911 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18912  template <typename Dispatch>
18914  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
18915  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
18916  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
18917  uint32_t width,
18918  uint32_t height,
18919  uint32_t depth,
18920  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18921  {
18922  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18923 
18924  d.vkCmdTraceRaysKHR( m_commandBuffer,
18925  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
18926  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
18927  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
18928  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
18929  width,
18930  height,
18931  depth );
18932  }
18933 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
18934 
18935  template <typename Dispatch>
18939  uint32_t createInfoCount,
18941  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
18942  VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
18943  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
18944  {
18945  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18946  return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
18947  static_cast<VkDeferredOperationKHR>( deferredOperation ),
18948  static_cast<VkPipelineCache>( pipelineCache ),
18949  createInfoCount,
18950  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
18951  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18952  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
18953  }
18954 
18955 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18956  template <typename PipelineAllocator, typename Dispatch>
18962  Dispatch const & d ) const
18963  {
18964  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18965 
18966  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
18967  VkResult result = d.vkCreateRayTracingPipelinesKHR(
18968  m_device,
18969  static_cast<VkDeferredOperationKHR>( deferredOperation ),
18970  static_cast<VkPipelineCache>( pipelineCache ),
18971  createInfos.size(),
18972  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
18973  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18974  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
18975  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18976  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
18981 
18983  }
18984 
18985  template <typename PipelineAllocator,
18986  typename Dispatch,
18987  typename B0,
18994  PipelineAllocator & pipelineAllocator,
18995  Dispatch const & d ) const
18996  {
18997  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
18998 
18999  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
19000  VkResult result = d.vkCreateRayTracingPipelinesKHR(
19001  m_device,
19002  static_cast<VkDeferredOperationKHR>( deferredOperation ),
19003  static_cast<VkPipelineCache>( pipelineCache ),
19004  createInfos.size(),
19005  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
19006  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19007  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
19008  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19009  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
19014 
19016  }
19017 
19018  template <typename Dispatch>
19024  Dispatch const & d ) const
19025  {
19026  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19027 
19029  VkResult result = d.vkCreateRayTracingPipelinesKHR(
19030  m_device,
19031  static_cast<VkDeferredOperationKHR>( deferredOperation ),
19032  static_cast<VkPipelineCache>( pipelineCache ),
19033  1,
19034  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
19035  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19036  reinterpret_cast<VkPipeline *>( &pipeline ) );
19037  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19038  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
19043 
19045  }
19046 
19047 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19048  template <typename Dispatch, typename PipelineAllocator>
19055  Dispatch const & d ) const
19056  {
19057  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19058 
19059  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
19060  VkResult result = d.vkCreateRayTracingPipelinesKHR(
19061  m_device,
19062  static_cast<VkDeferredOperationKHR>( deferredOperation ),
19063  static_cast<VkPipelineCache>( pipelineCache ),
19064  createInfos.size(),
19065  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
19066  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19067  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
19068  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19069  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
19074  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
19075  uniquePipelines.reserve( createInfos.size() );
19076  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
19077  for ( auto const & pipeline : pipelines )
19078  {
19079  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
19080  }
19082  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
19083  }
19084 
19085  template <typename Dispatch,
19086  typename PipelineAllocator,
19087  typename B0,
19088  typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
19095  PipelineAllocator & pipelineAllocator,
19096  Dispatch const & d ) const
19097  {
19098  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19099 
19100  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
19101  VkResult result = d.vkCreateRayTracingPipelinesKHR(
19102  m_device,
19103  static_cast<VkDeferredOperationKHR>( deferredOperation ),
19104  static_cast<VkPipelineCache>( pipelineCache ),
19105  createInfos.size(),
19106  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
19107  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19108  reinterpret_cast<VkPipeline *>( pipelines.data() ) );
19109  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19110  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
19115  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
19116  uniquePipelines.reserve( createInfos.size() );
19117  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
19118  for ( auto const & pipeline : pipelines )
19119  {
19120  uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
19121  }
19123  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
19124  }
19125 
19126  template <typename Dispatch>
19132  Dispatch const & d ) const
19133  {
19134  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19135 
19137  VkResult result = d.vkCreateRayTracingPipelinesKHR(
19138  m_device,
19139  static_cast<VkDeferredOperationKHR>( deferredOperation ),
19140  static_cast<VkPipelineCache>( pipelineCache ),
19141  1,
19142  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
19143  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19144  reinterpret_cast<VkPipeline *>( &pipeline ) );
19145  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19146  VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
19151 
19153  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19155  }
19156 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
19157 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19158 
19159  template <typename Dispatch>
19161  uint32_t firstGroup,
19162  uint32_t groupCount,
19163  size_t dataSize,
19164  void * pData,
19165  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19166  {
19167  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19168  return static_cast<Result>(
19169  d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
19170  }
19171 
19172 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19173  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
19175  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
19176  {
19177  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19178 
19179  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
19180  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
19181  VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
19182  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
19183  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
19184 
19185  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
19186  }
19187 
19188  template <typename DataType, typename Dispatch>
19190  Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
19191  {
19192  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19193 
19194  DataType data;
19195  VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
19196  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
19197  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
19198 
19199  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
19200  }
19201 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19202 
19203  template <typename Dispatch>
19205  uint32_t firstGroup,
19206  uint32_t groupCount,
19207  size_t dataSize,
19208  void * pData,
19209  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19210  {
19211  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19212  return static_cast<Result>(
19213  d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
19214  }
19215 
19216 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19217  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
19220  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
19221  {
19222  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19223 
19224  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
19225  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
19226  VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
19227  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
19228  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
19229 
19230  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
19231  }
19232 
19233  template <typename DataType, typename Dispatch>
19235  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
19236  {
19237  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19238 
19239  DataType data;
19240  VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
19241  m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
19242  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
19243 
19244  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
19245  }
19246 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19247 
19248  template <typename Dispatch>
19250  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
19251  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
19252  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
19253  VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
19254  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19255  {
19256  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19257  d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
19258  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
19259  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
19260  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
19261  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
19262  static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19263  }
19264 
19265 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19266  template <typename Dispatch>
19268  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
19269  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
19270  const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
19271  VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
19272  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19273  {
19274  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19275 
19276  d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
19277  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
19278  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
19279  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
19280  reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
19281  static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19282  }
19283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19284 
19285  template <typename Dispatch>
19287  uint32_t group,
19289  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19290  {
19291  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19292  return static_cast<DeviceSize>(
19293  d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
19294  }
19295 
  // Records a command that sets the stack size (in bytes) for the ray tracing pipeline state of
  // this command buffer (VK_KHR_ray_tracing_pipeline). Thin forwarding wrapper around
  // vkCmdSetRayTracingPipelineStackSizeKHR, dispatched through 'd'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }
19302 
19303  //=== VK_EXT_vertex_input_dynamic_state ===
19304 
  // Records a command that sets the vertex input binding and attribute descriptions dynamically
  // (VK_EXT_vertex_input_dynamic_state). Pointer/count variant: the caller supplies raw arrays;
  // this wrapper only reinterprets the C++ structure pointers to their C equivalents and forwards
  // to vkCmdSetVertexInputEXT through the dispatcher 'd'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
                                                           uint32_t vertexAttributeDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetVertexInputEXT( m_commandBuffer,
                              vertexBindingDescriptionCount,
                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
                              vertexAttributeDescriptionCount,
                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  }
19319 
19320 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19321  template <typename Dispatch>
19325  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19326  {
19327  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19328 
19329  d.vkCmdSetVertexInputEXT( m_commandBuffer,
19330  vertexBindingDescriptions.size(),
19331  reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
19332  vertexAttributeDescriptions.size(),
19333  reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
19334  }
19335 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19336 
19337 #if defined( VK_USE_PLATFORM_FUCHSIA )
19338  //=== VK_FUCHSIA_external_memory ===
19339 
19340  template <typename Dispatch>
19342  Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
19343  zx_handle_t * pZirconHandle,
19344  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19345  {
19346  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19347  return static_cast<Result>(
19348  d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
19349  }
19350 
19351 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19352  template <typename Dispatch>
19354  Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
19355  {
19356  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19357 
19358  zx_handle_t zirconHandle;
19359  VkResult result =
19360  d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
19361  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
19362 
19363  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
19364  }
19365 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19366 
19367  template <typename Dispatch>
19369  Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
19370  zx_handle_t zirconHandle,
19371  VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
19372  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19373  {
19374  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19375  return static_cast<Result>(
19376  d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
19377  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
19378  zirconHandle,
19379  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
19380  }
19381 
19382 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19383  template <typename Dispatch>
19385  Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
19386  zx_handle_t zirconHandle,
19387  Dispatch const & d ) const
19388  {
19389  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19390 
19391  VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
19392  VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
19393  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
19394  zirconHandle,
19395  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
19396  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
19397 
19398  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
19399  }
19400 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19401 #endif /*VK_USE_PLATFORM_FUCHSIA*/
19402 
19403 #if defined( VK_USE_PLATFORM_FUCHSIA )
19404  //=== VK_FUCHSIA_external_semaphore ===
19405 
  // Imports a Zircon handle into a Vulkan semaphore (VK_FUCHSIA_external_semaphore; only compiled
  // under VK_USE_PLATFORM_FUCHSIA). Pointer variant: returns the raw Result and performs no
  // result checking — it simply forwards to vkImportSemaphoreZirconHandleFUCHSIA through 'd'.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  }
19414 
19415 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19416  template <typename Dispatch>
19418  Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
19419  Dispatch const & d ) const
19420  {
19421  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19422 
19423  VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
19424  m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
19425  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
19426 
19427  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
19428  }
19429 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19430 
19431  template <typename Dispatch>
19433  Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
19434  zx_handle_t * pZirconHandle,
19435  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19436  {
19437  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19438  return static_cast<Result>(
19439  d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
19440  }
19441 
19442 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19443  template <typename Dispatch>
19445  Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
19446  {
19447  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19448 
19449  zx_handle_t zirconHandle;
19450  VkResult result =
19451  d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
19452  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
19453 
19454  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
19455  }
19456 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19457 #endif /*VK_USE_PLATFORM_FUCHSIA*/
19458 
19459 #if defined( VK_USE_PLATFORM_FUCHSIA )
19460  //=== VK_FUCHSIA_buffer_collection ===
19461 
19462  template <typename Dispatch>
19464  Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
19465  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
19466  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
19467  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19468  {
19469  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19470  return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
19471  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
19472  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19473  reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
19474  }
19475 
19476 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19477  template <typename Dispatch>
19479  Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
19480  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19481  Dispatch const & d ) const
19482  {
19483  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19484 
19485  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
19486  VkResult result = d.vkCreateBufferCollectionFUCHSIA(
19487  m_device,
19488  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
19489  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19490  reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
19491  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
19492 
19493  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
19494  }
19495 
19496 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19497  template <typename Dispatch>
19498  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
19499  Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
19500  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19501  Dispatch const & d ) const
19502  {
19503  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19504 
19505  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
19506  VkResult result = d.vkCreateBufferCollectionFUCHSIA(
19507  m_device,
19508  reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
19509  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19510  reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
19511  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
19512 
19513  return createResultValueType(
19514  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19515  UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
19516  }
19517 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
19518 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19519 
19520  template <typename Dispatch>
19522  Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19523  const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
19524  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19525  {
19526  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19527  return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
19528  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
19529  }
19530 
19531 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19532  template <typename Dispatch>
19534  Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19535  const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
19536  Dispatch const & d ) const
19537  {
19538  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19539 
19540  VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
19541  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
19542  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
19543 
19544  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
19545  }
19546 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19547 
19548  template <typename Dispatch>
19550  Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19551  const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
19552  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19553  {
19554  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19555  return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
19556  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
19557  }
19558 
19559 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19560  template <typename Dispatch>
19562  Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19563  const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
19564  Dispatch const & d ) const
19565  {
19566  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19567 
19568  VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
19569  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
19570  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
19571 
19572  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
19573  }
19574 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19575 
  // Destroys a Fuchsia buffer collection (VK_FUCHSIA_buffer_collection; only compiled under
  // VK_USE_PLATFORM_FUCHSIA). Pointer variant: pAllocator may be null for default allocation.
  // Forwards to vkDestroyBufferCollectionFUCHSIA through the dispatcher 'd'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
19585 
19586 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Destroys a Fuchsia buffer collection (enhanced-mode variant): takes the allocation callbacks
  // as an Optional reference wrapper instead of a raw pointer, then converts it to the C pointer
  // form and forwards to vkDestroyBufferCollectionFUCHSIA through the dispatcher 'd'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      // Optional<...> converts to a (possibly null) AllocationCallbacks pointer.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
19599 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19600 
  // Overload of the generic Device::destroy for BufferCollectionFUCHSIA handles (pointer
  // variant); identical behavior to destroyBufferCollectionFUCHSIA above — forwards to
  // vkDestroyBufferCollectionFUCHSIA through the dispatcher 'd'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher that was initialized against a different Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
19610 
19611 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19612  template <typename Dispatch>
19613  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19614  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19615  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19616  {
19617  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19618 
19619  d.vkDestroyBufferCollectionFUCHSIA(
19620  m_device,
19621  static_cast<VkBufferCollectionFUCHSIA>( collection ),
19622  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
19623  }
19624 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19625 
19626  template <typename Dispatch>
19628  Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19629  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
19630  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19631  {
19632  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19633  return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
19634  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
19635  }
19636 
19637 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19638  template <typename Dispatch>
19640  Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
19641  {
19642  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19643 
19644  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
19645  VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
19646  m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
19647  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
19648 
19649  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
19650  }
19651 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19652 #endif /*VK_USE_PLATFORM_FUCHSIA*/
19653 
19654  //=== VK_HUAWEI_subpass_shading ===
19655 
19656  template <typename Dispatch>
19658  VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
19659  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19660  {
19661  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19662  return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
19663  m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
19664  }
19665 
19666 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19667  template <typename Dispatch>
19670  {
19671  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19672 
19673  VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
19674  VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
19675  m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
19676  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19677  VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
19679 
19680  return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
19681  }
19682 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19683 
19684  template <typename Dispatch>
19686  {
19687  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19688  d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
19689  }
19690 
19691  //=== VK_HUAWEI_invocation_mask ===
19692 
19693  template <typename Dispatch>
19696  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19697  {
19698  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19699  d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
19700  }
19701 
19702  //=== VK_NV_external_memory_rdma ===
19703 
19704  template <typename Dispatch>
19708  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19709  {
19710  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19711  return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
19712  m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
19713  }
19714 
19715 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19716  template <typename Dispatch>
19718  Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
19719  {
19720  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19721 
19723  VkResult result = d.vkGetMemoryRemoteAddressNV(
19724  m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
19725  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
19726 
19727  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
19728  }
19729 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19730 
19731  //=== VK_EXT_pipeline_properties ===
19732 
19733  template <typename Dispatch>
19735  VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
19736  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19737  {
19738  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19739  return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
19740  m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
19741  }
19742 
19743 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19744  template <typename Dispatch>
19746  Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
19747  {
19748  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19749 
19750  VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
19751  VkResult result = d.vkGetPipelinePropertiesEXT(
19752  m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
19753  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
19754 
19755  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
19756  }
19757 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19758 
19759  //=== VK_EXT_extended_dynamic_state2 ===
19760 
  // VK_EXT_extended_dynamic_state2: sets the number of tessellation patch control
  // points dynamically for subsequent draws recorded into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // The dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }
19767 
19768  template <typename Dispatch>
19770  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19771  {
19772  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19773  d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
19774  }
19775 
19776  template <typename Dispatch>
19778  {
19779  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19780  d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
19781  }
19782 
19783  template <typename Dispatch>
19785  {
19786  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19787  d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
19788  }
19789 
19790  template <typename Dispatch>
19792  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19793  {
19794  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19795  d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
19796  }
19797 
19798 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
19799  //=== VK_QNX_screen_surface ===
19800 
19801  template <typename Dispatch>
19802  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
19803  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
19805  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19806  {
19807  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19808  return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
19809  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
19810  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19811  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
19812  }
19813 
19814 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19815  template <typename Dispatch>
19817  Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
19818  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19819  Dispatch const & d ) const
19820  {
19821  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19822 
19824  VkResult result = d.vkCreateScreenSurfaceQNX(
19825  m_instance,
19826  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
19827  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19828  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
19829  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
19830 
19831  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
19832  }
19833 
19834 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19835  template <typename Dispatch>
19836  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
19837  Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
19838  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19839  Dispatch const & d ) const
19840  {
19841  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19842 
19844  VkResult result = d.vkCreateScreenSurfaceQNX(
19845  m_instance,
19846  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
19847  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19848  reinterpret_cast<VkSurfaceKHR *>( &surface ) );
19849  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
19850 
19851  return createResultValueType(
19852  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19853  UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
19854  }
19855 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
19856 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19857 
  // VK_QNX_screen_surface: queries QNX screen presentation support for the given
  // queue family of this physical device.  The _screen_window pointer is passed
  // through to the loader entry point unchanged.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
                                                                            struct _screen_window * window,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // The dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  }
19866 
19867 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19868  template <typename Dispatch>
19870  PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19871  {
19872  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19873 
19874  VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
19875 
19876  return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
19877  }
19878 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19879 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
19880 
19881  //=== VK_EXT_color_write_enable ===
19882 
19883  template <typename Dispatch>
19885  const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
19886  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19887  {
19888  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19889  d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
19890  }
19891 
19892 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19893  template <typename Dispatch>
19895  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19896  {
19897  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19898 
19899  d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
19900  }
19901 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19902 
19903  //=== VK_KHR_ray_tracing_maintenance1 ===
19904 
19905  template <typename Dispatch>
19907  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19908  {
19909  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19910  d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19911  }
19912 
19913  //=== VK_EXT_multi_draw ===
19914 
19915  template <typename Dispatch>
19917  const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
19918  uint32_t instanceCount,
19919  uint32_t firstInstance,
19920  uint32_t stride,
19921  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19922  {
19923  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19924  d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
19925  }
19926 
19927 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19928  template <typename Dispatch>
19930  uint32_t instanceCount,
19931  uint32_t firstInstance,
19932  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19933  {
19934  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19935 
19936  d.vkCmdDrawMultiEXT( m_commandBuffer,
19937  vertexInfo.size(),
19938  reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
19939  instanceCount,
19940  firstInstance,
19941  vertexInfo.stride() );
19942  }
19943 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19944 
19945  template <typename Dispatch>
19948  uint32_t instanceCount,
19949  uint32_t firstInstance,
19950  uint32_t stride,
19951  const int32_t * pVertexOffset,
19952  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19953  {
19954  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19955  d.vkCmdDrawMultiIndexedEXT(
19956  m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
19957  }
19958 
19959 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19960  template <typename Dispatch>
19961  VULKAN_HPP_INLINE void
19963  uint32_t instanceCount,
19964  uint32_t firstInstance,
19965  Optional<const int32_t> vertexOffset,
19966  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19967  {
19968  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19969 
19970  d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
19971  indexInfo.size(),
19972  reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
19973  instanceCount,
19974  firstInstance,
19975  indexInfo.stride(),
19976  static_cast<const int32_t *>( vertexOffset ) );
19977  }
19978 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
19979 
19980  //=== VK_EXT_opacity_micromap ===
19981 
19982  template <typename Dispatch>
19984  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
19986  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
19987  {
19988  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
19989  return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
19990  reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
19991  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19992  reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
19993  }
19994 
19995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19996  template <typename Dispatch>
20000  Dispatch const & d ) const
20001  {
20002  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20003 
20005  VkResult result =
20006  d.vkCreateMicromapEXT( m_device,
20007  reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
20008  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20009  reinterpret_cast<VkMicromapEXT *>( &micromap ) );
20010  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
20011 
20012  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
20013  }
20014 
20015 # ifndef VULKAN_HPP_NO_SMART_HANDLE
20016  template <typename Dispatch>
20020  Dispatch const & d ) const
20021  {
20022  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20023 
20025  VkResult result =
20026  d.vkCreateMicromapEXT( m_device,
20027  reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
20028  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20029  reinterpret_cast<VkMicromapEXT *>( &micromap ) );
20030  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
20031 
20032  return createResultValueType(
20033  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20035  }
20036 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
20037 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20038 
20039  template <typename Dispatch>
20041  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20042  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20043  {
20044  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20045  d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20046  }
20047 
20048 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20049  template <typename Dispatch>
20052  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20053  {
20054  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20055 
20056  d.vkDestroyMicromapEXT( m_device,
20057  static_cast<VkMicromapEXT>( micromap ),
20058  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20059  }
20060 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20061 
20062  template <typename Dispatch>
20064  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
20065  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20066  {
20067  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20068  d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20069  }
20070 
20071 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20072  template <typename Dispatch>
20075  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20076  {
20077  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20078 
20079  d.vkDestroyMicromapEXT( m_device,
20080  static_cast<VkMicromapEXT>( micromap ),
20081  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20082  }
20083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20084 
20085  template <typename Dispatch>
20088  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20089  {
20090  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20091  d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
20092  }
20093 
20094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20095  template <typename Dispatch>
20097  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20098  {
20099  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20100 
20101  d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
20102  }
20103 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20104 
20105  template <typename Dispatch>
20107  uint32_t infoCount,
20109  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20110  {
20111  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20112  return static_cast<Result>( d.vkBuildMicromapsEXT(
20113  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
20114  }
20115 
20116 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20117  template <typename Dispatch>
20121  Dispatch const & d ) const
20122  {
20123  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20124 
20125  VkResult result = d.vkBuildMicromapsEXT(
20126  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
20127  resultCheck(
20128  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20129  VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
20131 
20132  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20133  }
20134 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20135 
20136  template <typename Dispatch>
20139  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20140  {
20141  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20142  return static_cast<Result>(
20143  d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
20144  }
20145 
20146 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20147  template <typename Dispatch>
20150  Dispatch const & d ) const
20151  {
20152  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20153 
20154  VkResult result =
20155  d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
20156  resultCheck(
20157  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20158  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
20160 
20161  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20162  }
20163 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20164 
20165  template <typename Dispatch>
20168  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20169  {
20170  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20171  return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
20172  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
20173  }
20174 
20175 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20176  template <typename Dispatch>
20178  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
20179  {
20180  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20181 
20182  VkResult result = d.vkCopyMicromapToMemoryEXT(
20183  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
20184  resultCheck(
20185  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20186  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
20188 
20189  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20190  }
20191 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20192 
20193  template <typename Dispatch>
20196  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20197  {
20198  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20199  return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
20200  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
20201  }
20202 
20203 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20204  template <typename Dispatch>
20206  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
20207  {
20208  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20209 
20210  VkResult result = d.vkCopyMemoryToMicromapEXT(
20211  m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
20212  resultCheck(
20213  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20214  VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
20216 
20217  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
20218  }
20219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20220 
20221  template <typename Dispatch>
20223  const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
20225  size_t dataSize,
20226  void * pData,
20227  size_t stride,
20228  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20229  {
20230  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20231  return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
20232  m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
20233  }
20234 
20235 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20236  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
20240  size_t dataSize,
20241  size_t stride,
20242  Dispatch const & d ) const
20243  {
20244  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20245 
20246  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
20247  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
20248  VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
20249  micromaps.size(),
20250  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
20251  static_cast<VkQueryType>( queryType ),
20252  data.size() * sizeof( DataType ),
20253  reinterpret_cast<void *>( data.data() ),
20254  stride );
20255  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
20256 
20257  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
20258  }
20259 
20260  template <typename DataType, typename Dispatch>
20264  size_t stride,
20265  Dispatch const & d ) const
20266  {
20267  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20268 
20269  DataType data;
20270  VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
20271  micromaps.size(),
20272  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
20273  static_cast<VkQueryType>( queryType ),
20274  sizeof( DataType ),
20275  reinterpret_cast<void *>( &data ),
20276  stride );
20277  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
20278 
20279  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
20280  }
20281 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20282 
20283  template <typename Dispatch>
20285  {
20286  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20287  d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
20288  }
20289 
20290 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20291  template <typename Dispatch>
20293  {
20294  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20295 
20296  d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
20297  }
20298 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20299 
20300  template <typename Dispatch>
20302  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20303  {
20304  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20305  d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
20306  }
20307 
20308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20309  template <typename Dispatch>
20311  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20312  {
20313  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20314 
20315  d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
20316  }
20317 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20318 
20319  template <typename Dispatch>
20321  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20322  {
20323  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20324  d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
20325  }
20326 
20327 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20328  template <typename Dispatch>
20330  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20331  {
20332  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20333 
20334  d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
20335  }
20336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20337 
20338  template <typename Dispatch>
20340  const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
20343  uint32_t firstQuery,
20344  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20345  {
20346  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20347  d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
20348  micromapCount,
20349  reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
20350  static_cast<VkQueryType>( queryType ),
20351  static_cast<VkQueryPool>( queryPool ),
20352  firstQuery );
20353  }
20354 
20355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20356  template <typename Dispatch>
20357  VULKAN_HPP_INLINE void
20361  uint32_t firstQuery,
20362  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20363  {
20364  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20365 
20366  d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
20367  micromaps.size(),
20368  reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
20369  static_cast<VkQueryType>( queryType ),
20370  static_cast<VkQueryPool>( queryPool ),
20371  firstQuery );
20372  }
20373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20374 
20375  template <typename Dispatch>
20378  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20379  {
20380  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20381  d.vkGetDeviceMicromapCompatibilityEXT( m_device,
20382  reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
20383  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
20384  }
20385 
20386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20387  template <typename Dispatch>
20390  {
20391  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20392 
20394  d.vkGetDeviceMicromapCompatibilityEXT( m_device,
20395  reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
20396  reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
20397 
20398  return compatibility;
20399  }
20400 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20401 
20402  template <typename Dispatch>
20404  const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
20406  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20407  {
20408  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20409  d.vkGetMicromapBuildSizesEXT( m_device,
20410  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
20411  reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
20412  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
20413  }
20414 
20415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20416  template <typename Dispatch>
20420  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20421  {
20422  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20423 
20425  d.vkGetMicromapBuildSizesEXT( m_device,
20426  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
20427  reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
20428  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
20429 
20430  return sizeInfo;
20431  }
20432 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20433 
20434  //=== VK_EXT_pageable_device_local_memory ===
20435 
20436  template <typename Dispatch>
20438  {
20439  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20440  d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
20441  }
20442 
20443  //=== VK_KHR_maintenance4 ===
20444 
20445  template <typename Dispatch>
20447  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
20448  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20449  {
20450  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20451  d.vkGetDeviceBufferMemoryRequirementsKHR(
20452  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
20453  }
20454 
20455 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20456  template <typename Dispatch>
20459  {
20460  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20461 
20462  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
20463  d.vkGetDeviceBufferMemoryRequirementsKHR(
20464  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20465 
20466  return memoryRequirements;
20467  }
20468 
20469  template <typename X, typename Y, typename... Z, typename Dispatch>
20472  {
20473  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20474 
20475  StructureChain<X, Y, Z...> structureChain;
20476  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
20477  d.vkGetDeviceBufferMemoryRequirementsKHR(
20478  m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20479 
20480  return structureChain;
20481  }
20482 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20483 
20484  template <typename Dispatch>
20486  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
20487  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20488  {
20489  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20490  d.vkGetDeviceImageMemoryRequirementsKHR(
20491  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
20492  }
20493 
20494 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20495  template <typename Dispatch>
20498  {
20499  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20500 
20501  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
20502  d.vkGetDeviceImageMemoryRequirementsKHR(
20503  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20504 
20505  return memoryRequirements;
20506  }
20507 
20508  template <typename X, typename Y, typename... Z, typename Dispatch>
20511  {
20512  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20513 
20514  StructureChain<X, Y, Z...> structureChain;
20515  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
20516  d.vkGetDeviceImageMemoryRequirementsKHR(
20517  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20518 
20519  return structureChain;
20520  }
20521 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20522 
20523  template <typename Dispatch>
20525  uint32_t * pSparseMemoryRequirementCount,
20526  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
20527  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20528  {
20529  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20530  d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20531  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
20532  pSparseMemoryRequirementCount,
20533  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
20534  }
20535 
20536 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20537  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
20538  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
20540  {
20541  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20542 
20543  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
20544  uint32_t sparseMemoryRequirementCount;
20545  d.vkGetDeviceImageSparseMemoryRequirementsKHR(
20546  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
20547  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20548  d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20549  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
20550  &sparseMemoryRequirementCount,
20551  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
20552 
20553  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
20554  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
20555  {
20556  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20557  }
20558  return sparseMemoryRequirements;
20559  }
20560 
20561  template <typename SparseImageMemoryRequirements2Allocator,
20562  typename Dispatch,
20563  typename B1,
20565  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
20567  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
20568  Dispatch const & d ) const
20569  {
20570  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20571 
20572  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
20573  sparseImageMemoryRequirements2Allocator );
20574  uint32_t sparseMemoryRequirementCount;
20575  d.vkGetDeviceImageSparseMemoryRequirementsKHR(
20576  m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
20577  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20578  d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20579  reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
20580  &sparseMemoryRequirementCount,
20581  reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
20582 
20583  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
20584  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
20585  {
20586  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20587  }
20588  return sparseMemoryRequirements;
20589  }
20590 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20591 
20592  //=== VK_VALVE_descriptor_set_host_mapping ===
20593 
20594  template <typename Dispatch>
20597  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20598  {
20599  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20600  d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
20601  reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
20602  reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
20603  }
20604 
20605 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20606  template <typename Dispatch>
20609  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20610  {
20611  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20612 
20614  d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
20615  reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
20616  reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
20617 
20618  return hostMapping;
20619  }
20620 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20621 
20622  template <typename Dispatch>
20623  VULKAN_HPP_INLINE void
20625  {
20626  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20627  d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
20628  }
20629 
20630 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20631  template <typename Dispatch>
20633  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20634  {
20635  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20636 
20637  void * pData;
20638  d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
20639 
20640  return pData;
20641  }
20642 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20643 
20644  //=== VK_NV_copy_memory_indirect ===
20645 
20646  template <typename Dispatch>
20648  uint32_t copyCount,
20649  uint32_t stride,
20650  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20651  {
20652  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20653  d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
20654  }
20655 
20656  template <typename Dispatch>
20658  uint32_t copyCount,
20659  uint32_t stride,
20660  VULKAN_HPP_NAMESPACE::Image dstImage,
20661  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
20662  const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,
20663  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20664  {
20665  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20666  d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
20667  static_cast<VkDeviceAddress>( copyBufferAddress ),
20668  copyCount,
20669  stride,
20670  static_cast<VkImage>( dstImage ),
20671  static_cast<VkImageLayout>( dstImageLayout ),
20672  reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
20673  }
20674 
20675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20676  template <typename Dispatch>
20677  VULKAN_HPP_INLINE void
20679  uint32_t stride,
20680  VULKAN_HPP_NAMESPACE::Image dstImage,
20681  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
20683  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20684  {
20685  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20686 
20687  d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
20688  static_cast<VkDeviceAddress>( copyBufferAddress ),
20689  imageSubresources.size(),
20690  stride,
20691  static_cast<VkImage>( dstImage ),
20692  static_cast<VkImageLayout>( dstImageLayout ),
20693  reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
20694  }
20695 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20696 
20697  //=== VK_NV_memory_decompression ===
20698 
20699  template <typename Dispatch>
20700  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount,
20701  const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,
20702  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20703  {
20704  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20705  d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
20706  }
20707 
20708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20709  template <typename Dispatch>
20710  VULKAN_HPP_INLINE void
20712  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20713  {
20714  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20715 
20716  d.vkCmdDecompressMemoryNV(
20717  m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
20718  }
20719 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20720 
20721  template <typename Dispatch>
20723  VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
20724  uint32_t stride,
20725  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20726  {
20727  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20728  d.vkCmdDecompressMemoryIndirectCountNV(
20729  m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
20730  }
20731 
20732  //=== VK_EXT_extended_dynamic_state3 ===
20733 
20734  template <typename Dispatch>
20736  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20737  {
20738  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20739  d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
20740  }
20741 
20742  template <typename Dispatch>
20744  {
20745  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20746  d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
20747  }
20748 
20749  template <typename Dispatch>
20751  {
20752  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20753  d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
20754  }
20755 
20756  template <typename Dispatch>
20758  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20759  {
20760  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20761  d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
20762  }
20763 
20764  template <typename Dispatch>
20766  const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
20767  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20768  {
20769  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20770  d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
20771  }
20772 
20773 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20774  template <typename Dispatch>
20777  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20778  {
20779  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20780 
20781  d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
20782  }
20783 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20784 
20785  template <typename Dispatch>
20787  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20788  {
20789  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20790  d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
20791  }
20792 
20793  template <typename Dispatch>
20795  {
20796  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20797  d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
20798  }
20799 
20800  template <typename Dispatch>
20802  {
20803  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20804  d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
20805  }
20806 
20807  template <typename Dispatch>
20809  uint32_t attachmentCount,
20810  const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
20811  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20812  {
20813  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20814  d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
20815  }
20816 
20817 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20818  template <typename Dispatch>
20821  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20822  {
20823  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20824 
20825  d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
20826  }
20827 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20828 
20829  template <typename Dispatch>
20831  uint32_t attachmentCount,
20832  const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
20833  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20834  {
20835  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20836  d.vkCmdSetColorBlendEquationEXT(
20837  m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
20838  }
20839 
20840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20841  template <typename Dispatch>
20842  VULKAN_HPP_INLINE void
20843  CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
20845  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20846  {
20847  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20848 
20849  d.vkCmdSetColorBlendEquationEXT(
20850  m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
20851  }
20852 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20853 
20854  template <typename Dispatch>
20856  uint32_t attachmentCount,
20857  const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
20858  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20859  {
20860  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20861  d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
20862  }
20863 
20864 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20865  template <typename Dispatch>
20866  VULKAN_HPP_INLINE void
20867  CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
20869  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20870  {
20871  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20872 
20873  d.vkCmdSetColorWriteMaskEXT(
20874  m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
20875  }
20876 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20877 
20878  template <typename Dispatch>
20879  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20880  {
20881  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20882  d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
20883  }
20884 
20885  template <typename Dispatch>
20886  VULKAN_HPP_INLINE void
20888  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20889  {
20890  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20891  d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
20892  }
20893 
20894  template <typename Dispatch>
20895  VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
20896  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20897  {
20898  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20899  d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
20900  }
20901 
20902  template <typename Dispatch>
20904  {
20905  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20906  d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
20907  }
20908 
20909  template <typename Dispatch>
20911  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20912  {
20913  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20914  d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
20915  }
20916 
20917  template <typename Dispatch>
20919  uint32_t attachmentCount,
20920  const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
20921  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20922  {
20923  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20924  d.vkCmdSetColorBlendAdvancedEXT(
20925  m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
20926  }
20927 
20928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20929  template <typename Dispatch>
20930  VULKAN_HPP_INLINE void
20931  CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
20933  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20934  {
20935  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20936 
20937  d.vkCmdSetColorBlendAdvancedEXT(
20938  m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
20939  }
20940 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
20941 
20942  template <typename Dispatch>
20944  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20945  {
20946  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20947  d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
20948  }
20949 
20950  template <typename Dispatch>
20952  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20953  {
20954  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20955  d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
20956  }
20957 
20958  template <typename Dispatch>
20960  {
20961  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20962  d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
20963  }
20964 
20965  template <typename Dispatch>
20967  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20968  {
20969  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20970  d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
20971  }
20972 
20973  template <typename Dispatch>
20975  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20976  {
20977  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20978  d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
20979  }
20980 
20981  template <typename Dispatch>
20983  uint32_t viewportCount,
20984  const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
20985  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20986  {
20987  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20988  d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
20989  }
20990 
20991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20992  template <typename Dispatch>
20993  VULKAN_HPP_INLINE void
20994  CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
20996  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
20997  {
20998  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
20999 
21000  d.vkCmdSetViewportSwizzleNV(
21001  m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
21002  }
21003 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21004 
21005  template <typename Dispatch>
21007  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21008  {
21009  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21010  d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
21011  }
21012 
21013  template <typename Dispatch>
21014  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21015  {
21016  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21017  d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
21018  }
21019 
21020  template <typename Dispatch>
21022  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21023  {
21024  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21025  d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
21026  }
21027 
21028  template <typename Dispatch>
21030  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21031  {
21032  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21033  d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
21034  }
21035 
21036  template <typename Dispatch>
21037  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
21038  const float * pCoverageModulationTable,
21039  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21040  {
21041  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21042  d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
21043  }
21044 
21045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21046  template <typename Dispatch>
21048  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21049  {
21050  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21051 
21052  d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
21053  }
21054 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21055 
21056  template <typename Dispatch>
21058  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21059  {
21060  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21061  d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
21062  }
21063 
21064  template <typename Dispatch>
21066  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21067  {
21068  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21069  d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
21070  }
21071 
21072  template <typename Dispatch>
21074  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21075  {
21076  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21077  d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
21078  }
21079 
21080  //=== VK_EXT_shader_module_identifier ===
21081 
21082  template <typename Dispatch>
21085  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21086  {
21087  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21088  d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
21089  }
21090 
21091 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21092  template <typename Dispatch>
21095  {
21096  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21097 
21099  d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
21100 
21101  return identifier;
21102  }
21103 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21104 
21105  template <typename Dispatch>
21108  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21109  {
21110  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21111  d.vkGetShaderModuleCreateInfoIdentifierEXT(
21112  m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
21113  }
21114 
21115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21116  template <typename Dispatch>
21119  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21120  {
21121  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21122 
21124  d.vkGetShaderModuleCreateInfoIdentifierEXT(
21125  m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
21126 
21127  return identifier;
21128  }
21129 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21130 
21131  //=== VK_NV_optical_flow ===
21132 
21133  template <typename Dispatch>
21136  uint32_t * pFormatCount,
21138  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21139  {
21140  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21141  return static_cast<Result>(
21142  d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21143  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
21144  pFormatCount,
21145  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
21146  }
21147 
21148 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21149  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
21153  Dispatch const & d ) const
21154  {
21155  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21156 
21157  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
21158  uint32_t formatCount;
21159  VkResult result;
21160  do
21161  {
21162  result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
21163  m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
21164  if ( ( result == VK_SUCCESS ) && formatCount )
21165  {
21166  imageFormatProperties.resize( formatCount );
21167  result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21168  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
21169  &formatCount,
21170  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
21171  }
21172  } while ( result == VK_INCOMPLETE );
21173  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
21174  VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
21175  if ( formatCount < imageFormatProperties.size() )
21176  {
21177  imageFormatProperties.resize( formatCount );
21178  }
21179  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
21180  }
21181 
// PhysicalDevice::getOpticalFlowImageFormatsNV — enhanced-mode overload taking a
// caller-supplied vector allocator. Performs the standard Vulkan two-call
// enumeration: query the count, size the vector, fetch the data, and repeat
// while the driver reports VK_INCOMPLETE (count changed between calls).
// NOTE(review): doxygen extraction dropped the return-type/name/enable_if lines
// (orig. lines 21185-21188); consult the generated vulkan_funcs.hpp for the
// full signature.
21182  template <typename OpticalFlowImageFormatPropertiesNVAllocator,
21183  typename Dispatch,
21184  typename B1,
21189  OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
21190  Dispatch const & d ) const
21191  {
21192  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21193 
21194  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
21195  opticalFlowImageFormatPropertiesNVAllocator );
21196  uint32_t formatCount;
21197  VkResult result;
21198  do
21199  {
// First call with a null properties pointer only queries formatCount.
21200  result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
21201  m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
21202  if ( ( result == VK_SUCCESS ) && formatCount )
21203  {
// Second call fills the now correctly sized vector.
21204  imageFormatProperties.resize( formatCount );
21205  result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21206  reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
21207  &formatCount,
21208  reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
21209  }
21210  } while ( result == VK_INCOMPLETE );
21211  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
21212  VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
// Shrink in case the driver returned fewer entries on the final call.
21213  if ( formatCount < imageFormatProperties.size() )
21214  {
21215  imageFormatProperties.resize( formatCount );
21216  }
21217  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
21218  }
21219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21220 
// Device::createOpticalFlowSessionNV — pointer (non-enhanced) variant, VK_NV_optical_flow.
// Thin noexcept wrapper: forwards pCreateInfo / pAllocator / pSession to
// vkCreateOpticalFlowSessionNV through the dispatcher and returns its Result unchanged.
// NOTE(review): doxygen extraction dropped the declarator lines
// (orig. 21222 and 21224); see the generated vulkan_funcs.hpp for the full signature.
21221  template <typename Dispatch>
21223  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21225  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21226  {
21227  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21228  return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
21229  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
21230  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
21231  reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
21232  }
21233 
21234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::createOpticalFlowSessionNV — enhanced-mode variant taking a reference
// createInfo and an Optional allocator; validates the result via resultCheck and
// returns the session wrapped by createResultValueType.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21236-21238
// and 21243, which presumably declared the OpticalFlowSessionNV local); see the
// generated vulkan_funcs.hpp for the full signature.
21235  template <typename Dispatch>
21239  Dispatch const & d ) const
21240  {
21241  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21242 
21244  VkResult result = d.vkCreateOpticalFlowSessionNV(
21245  m_device,
21246  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
21247  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21248  reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
21249  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
21250 
21251  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), session );
21252  }
21253 
21254 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Device::createOpticalFlowSessionNVUnique — smart-handle variant (guarded by
// VULKAN_HPP_NO_SMART_HANDLE); same call sequence as createOpticalFlowSessionNV
// but returns the session wrapped for unique ownership.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21256-21258,
// 21263, and 21273 — the latter presumably constructed the UniqueHandle with an
// ObjectDestroy deleter); see the generated vulkan_funcs.hpp for the full text.
21255  template <typename Dispatch>
21259  Dispatch const & d ) const
21260  {
21261  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21262 
21264  VkResult result = d.vkCreateOpticalFlowSessionNV(
21265  m_device,
21266  reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
21267  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21268  reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
21269  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
21270 
21271  return createResultValueType(
21272  static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
21274  }
21275 # endif /* VULKAN_HPP_NO_SMART_HANDLE */
21276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21277 
// Device::destroyOpticalFlowSessionNV — pointer-allocator variant; forwards
// directly to vkDestroyOpticalFlowSessionNV. No result to return (void API).
// NOTE(review): doxygen extraction dropped the declarator line (orig. 21279);
// see the generated vulkan_funcs.hpp for the full signature.
21278  template <typename Dispatch>
21280  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21281  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21282  {
21283  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21284  d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21285  }
21286 
21287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::destroyOpticalFlowSessionNV — enhanced-mode variant taking an Optional
// allocator, which is unwrapped to a const AllocationCallbacks* before the call.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21289-21290);
// see the generated vulkan_funcs.hpp for the full signature.
21288  template <typename Dispatch>
21291  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21292  {
21293  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21294 
21295  d.vkDestroyOpticalFlowSessionNV(
21296  m_device,
21297  static_cast<VkOpticalFlowSessionNV>( session ),
21298  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21299  }
21300 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21301 
// Device::destroy overload for OpticalFlowSessionNV (pointer allocator) — body is
// identical to destroyOpticalFlowSessionNV above; exists so the generic
// Device::destroy(handle, ...) interface covers this handle type.
// NOTE(review): doxygen extraction dropped the declarator line (orig. 21303);
// see the generated vulkan_funcs.hpp for the full signature.
21302  template <typename Dispatch>
21304  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
21305  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21306  {
21307  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21308  d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21309  }
21310 
21311 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::destroy overload for OpticalFlowSessionNV — enhanced-mode variant with an
// Optional allocator; mirrors destroyOpticalFlowSessionNV's enhanced overload.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21313-21314);
// see the generated vulkan_funcs.hpp for the full signature.
21312  template <typename Dispatch>
21315  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21316  {
21317  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21318 
21319  d.vkDestroyOpticalFlowSessionNV(
21320  m_device,
21321  static_cast<VkOpticalFlowSessionNV>( session ),
21322  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21323  }
21324 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21325 
21326 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::bindOpticalFlowSessionImageNV — non-enhanced branch (compiled when
// VULKAN_HPP_DISABLE_ENHANCED_MODE is defined): binds an image view to a session
// binding point and returns the raw Result; noexcept, no exception translation.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21328-21331);
// see the generated vulkan_funcs.hpp for the full signature.
21327  template <typename Dispatch>
21332  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21333  {
21334  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21335  return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
21336  static_cast<VkOpticalFlowSessionNV>( session ),
21337  static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
21338  static_cast<VkImageView>( view ),
21339  static_cast<VkImageLayout>( layout ) ) );
21340  }
21341 #else
// Device::bindOpticalFlowSessionImageNV — enhanced branch: same bind call, but the
// VkResult is routed through resultCheck and returned via createResultValueType
// (a void-typed ResultValueType).
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21343-21347);
// see the generated vulkan_funcs.hpp for the full signature.
21342  template <typename Dispatch>
21348  Dispatch const & d ) const
21349  {
21350  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21351 
21352  VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device,
21353  static_cast<VkOpticalFlowSessionNV>( session ),
21354  static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
21355  static_cast<VkImageView>( view ),
21356  static_cast<VkImageLayout>( layout ) );
21357  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
21358 
21359  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
21360  }
21361 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21362 
// CommandBuffer::opticalFlowExecuteNV — pointer variant: records the optical-flow
// execute command into this command buffer (note m_commandBuffer, not m_device).
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21364-21365);
// see the generated vulkan_funcs.hpp for the full signature.
21363  template <typename Dispatch>
21366  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21367  {
21368  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21369  d.vkCmdOpticalFlowExecuteNV(
21370  m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
21371  }
21372 
21373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// CommandBuffer::opticalFlowExecuteNV — enhanced variant taking executeInfo by
// reference; identical recording call with the struct's address forwarded.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21375-21376);
// see the generated vulkan_funcs.hpp for the full signature.
21374  template <typename Dispatch>
21377  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21378  {
21379  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21380 
21381  d.vkCmdOpticalFlowExecuteNV(
21382  m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
21383  }
21384 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21385 
21386  //=== VK_QCOM_tile_properties ===
21387 
// Device::getFramebufferTilePropertiesQCOM — pointer variant (VK_QCOM_tile_properties):
// forwards framebuffer, count pointer and properties array straight to
// vkGetFramebufferTilePropertiesQCOM and returns the raw Result.
// NOTE(review): doxygen extraction dropped the declarator line (orig. 21389/21391
// context); see the generated vulkan_funcs.hpp for the full signature.
21388  template <typename Dispatch>
21390  uint32_t * pPropertiesCount,
21392  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21393  {
21394  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21395  return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
21396  m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
21397  }
21398 
21399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::getFramebufferTilePropertiesQCOM — enhanced-mode vector variant.
// Two-call enumeration loop (count, then fill) repeated while VK_INCOMPLETE;
// unlike most enumerators here, the visible text returns the vector directly
// rather than routing through resultCheck/createResultValueType.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21401-21402);
// see the generated vulkan_funcs.hpp for the full signature.
21400  template <typename TilePropertiesQCOMAllocator, typename Dispatch>
21403  {
21404  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21405 
21406  std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
21407  uint32_t propertiesCount;
21408  VkResult result;
21409  do
21410  {
// First call with nullptr queries only the element count.
21411  result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
21412  if ( ( result == VK_SUCCESS ) && propertiesCount )
21413  {
21414  properties.resize( propertiesCount );
21415  result = d.vkGetFramebufferTilePropertiesQCOM(
21416  m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
21417  }
21418  } while ( result == VK_INCOMPLETE );
21419 
21420  VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
// Shrink if the final call reported fewer entries than were allocated.
21421  if ( propertiesCount < properties.size() )
21422  {
21423  properties.resize( propertiesCount );
21424  }
21425  return properties;
21426  }
21427 
// Device::getFramebufferTilePropertiesQCOM — allocator-aware enhanced-mode overload;
// identical enumeration loop to the vector variant above, but the result vector is
// constructed with the caller-supplied allocator.
// NOTE(review): doxygen extraction dropped the return-type/name/enable_if lines
// (orig. 21431-21433); see the generated vulkan_funcs.hpp for the full signature.
21428  template <typename TilePropertiesQCOMAllocator,
21429  typename Dispatch,
21430  typename B1,
21434  TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
21435  Dispatch const & d ) const
21436  {
21437  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21438 
21439  std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
21440  uint32_t propertiesCount;
21441  VkResult result;
21442  do
21443  {
// Two-call enumeration: count first, then fill; loop while VK_INCOMPLETE.
21444  result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
21445  if ( ( result == VK_SUCCESS ) && propertiesCount )
21446  {
21447  properties.resize( propertiesCount );
21448  result = d.vkGetFramebufferTilePropertiesQCOM(
21449  m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
21450  }
21451  } while ( result == VK_INCOMPLETE );
21452 
21453  VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
21454  if ( propertiesCount < properties.size() )
21455  {
21456  properties.resize( propertiesCount );
21457  }
21458  return properties;
21459  }
21460 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21461 
// Device::getDynamicRenderingTilePropertiesQCOM — pointer variant: queries tile
// properties for a dynamic-rendering pass described by pRenderingInfo; single
// property out-parameter (no enumeration loop) and raw Result return.
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21463-21464);
// see the generated vulkan_funcs.hpp for the full signature.
21462  template <typename Dispatch>
21465  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
21466  {
21467  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21468  return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
21469  m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
21470  }
21471 
21472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Device::getDynamicRenderingTilePropertiesQCOM — enhanced variant: fills a local
// TilePropertiesQCOM and returns it by value; the visible text discards the
// function's VkResult (no resultCheck).
// NOTE(review): doxygen extraction dropped the declarator lines (orig. 21474-21475
// and 21479, which presumably declared the local properties struct); see the
// generated vulkan_funcs.hpp for the full text.
21473  template <typename Dispatch>
21476  {
21477  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
21478 
21480  d.vkGetDynamicRenderingTilePropertiesQCOM(
21481  m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
21482 
21483  return properties;
21484  }
21485 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
21486 
21487 } // namespace VULKAN_HPP_NAMESPACE
21488 #endif
VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(uint32_t *pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei GLenum GLsizei GLsizei GLuint memory
Definition: RE_OGL.h:202
void dispatchBaseKHR(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR *pImportFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint GLsizei const GLuint const GLintptr const GLsizeiptr * sizes
Definition: glcorearb.h:2621
void writeAccelerationStructuresPropertiesNV(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLenum query
Definition: glad.h:2772
void setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR *pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint GLsizei const GLchar * message
Definition: glcorearb.h:2543
void getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *pMessenger, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPrimitiveTopologyEXT(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE *pBindingReference, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE *pHostMapping, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR *pMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLbitfield flags
Definition: glcorearb.h:1596
void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue *pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR *pStatistics, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT *pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type setEvent(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint32_t VkBool32
Definition: vulkan_core.h:93
void setMemoryPriorityEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void decompressMemoryNV(uint32_t decompressRegionCount, const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV *pDecompressMemoryRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBounds(float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksEXT(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex, uint32_t *pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR *pDisplays, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysNV(VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setExclusiveScissorNV(uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pExclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NOEXCEPT
Definition: vulkan.hpp:200
void writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkResult
Definition: vulkan_core.h:139
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingCaptureReplayShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Sampler, Dispatch > >::type createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::QueryPool, Dispatch > >::type createQueryPoolUnique(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *pMemoryFdProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties *pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT &displayEventInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t *pCounterValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
Definition: vulkan.hpp:205
void setCoverageModulationTableNV(uint32_t coverageModulationTableCount, const float *pCoverageModulationTable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
math::Stats statistics(const IterT &iter, bool threaded=true)
Iterate over a scalar grid and compute statistics (mean, variance, etc.) of the values of the voxels ...
Definition: Statistics.h:362
void beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromap, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMultiEXT(uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT *pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Y
Definition: ImathEuler.h:184
void writeMicromapsPropertiesEXT(uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot *pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch >, DescriptorSetAllocator > >::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo &allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::BufferView, Dispatch > >::type createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT *pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT *pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void
Definition: png.h:1083
ResultValueType< void >::type setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 *pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void subpassShadingHUAWEI(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 *pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo *pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setHdrMetadataEXT(uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT *pMetadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT(uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void *pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLboolean * data
Definition: glcorearb.h:131
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges(uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange *pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginTransformFeedbackEXT(uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer *pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer *pFramebuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT *pConditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch > >::type createAccelerationStructureKHRUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void **ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo *pBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch > >::type createDebugUtilsMessengerEXTUnique(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
PFN_vkVoidFunction getProcAddr(const char *pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSets(uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *pDescriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool *pDescriptorPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLchar *const * string
Definition: glcorearb.h:814
VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *pIndirectCommandsLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLfloat * value
Definition: glcorearb.h:824
void getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch > >::type createSamplerYcbcrConversionKHRUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo *pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMultiIndexedEXT(uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT *pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t *pVertexOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result submit(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendEnableEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 *pColorBlendEnables, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setVertexInputEXT(uint32_t vertexBindingDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT *pVertexAttributeDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportShadingRatePaletteNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV *pShadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR *pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR *pPerformanceQueryCreateInfo, uint32_t *pNumPasses, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch > >::type createSharedSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
T const * data() const VULKAN_HPP_NOEXCEPT
Definition: vulkan.hpp:684
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch > >::type createDisplayPlaneSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getFeatures2() const VULKAN_HPP_NOEXCEPT
void copyQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result resetFences(uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence *pFences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint64 GLenum handleType
Definition: RE_OGL.h:262
void copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 *pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearColorImage(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue *pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV *pExecuteInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource *pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout *pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers2EXT(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize *pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT *pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizerDiscardEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion(uint32_t *pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT
void copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t DeviceAddress
Definition: vulkan.hpp:6074
void setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties *pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkColorComponentFlags
Definition: vulkan_core.h:2541
void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch > >::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool
void setViewportSwizzleNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *pViewportSwizzles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Device *pDevice, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch > >::type allocateMemoryUnique(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo &allocateInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT *pIdentifier, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getToolProperties(uint32_t *pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties *pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
X
Definition: ImathEuler.h:183
Result getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit *pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch > >::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindIndexBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch > >::type createPipelineCacheUnique(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLenum GLenum GLsizei void * image
Definition: glad.h:5132
void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getCheckpointDataNV(uint32_t *pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV *pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch > >::type createDescriptorUpdateTemplateUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWScalingNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV *pViewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBoundsTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkPipelineStageFlags
Definition: vulkan_core.h:2401
**But if you need a result
Definition: thread.h:613
VULKAN_HPP_NODISCARD Result getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendAdvancedEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT *pColorBlendAdvanced, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint sampler
Definition: glcorearb.h:1656
void drawMeshTasksNV(uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pRectCount, VULKAN_HPP_NAMESPACE::Rect2D *pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(uint32_t *pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT *pTimeDomains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t *pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize *pCommittedMemoryInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR *pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
const GLuint * pipelines
Definition: glcorearb.h:1935
void getFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getGroupPeerMemoryFeatures(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags *pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint buffer
Definition: glcorearb.h:660
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch > >::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Buffer *pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL *pOverrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructuresKHR(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatchBase(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeTimestamp(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch > >::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL *pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL *pConfiguration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBiasEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t *pDataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result submit2(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getProperties2KHR() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLuint GLsizei const GLuint const GLintptr * offsets
Definition: glcorearb.h:2621
void copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 *pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX *pLaunchInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
struct _cl_event * event
Definition: glcorearb.h:2961
VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT *pTimestampInfos, uint64_t *pTimestamps, uint64_t *pMaxDeviation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 *pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE *pDisplayTimingProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch > >::type getDrmDisplayEXTUnique(int32_t drmFd, uint32_t connectorId, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags *pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilCompareMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch > >::type createSamplerYcbcrConversionUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingShaderGroupHandleNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Buffer, Dispatch > >::type createBufferUnique(const VULKAN_HPP_NAMESPACE::BufferCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLint GLsizei GLsizei height
Definition: glcorearb.h:103
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR *pPipelineInfo, uint32_t *pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindTransformFeedbackBuffersEXT(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindSparse(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo *pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endTransformFeedbackEXT(uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer *pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch > >::type createDeferredOperationKHRUnique(Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Sampler *pSampler, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX *pModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint framebuffer
Definition: glcorearb.h:1287
void waitEvents2KHR(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout *pPipelineLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLocalDimmingAMD(VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT *pPipelineInfo, VULKAN_HPP_NAMESPACE::BaseOutStructure *pPipelineProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLintptr offset
Definition: glcorearb.h:665
void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet *pDescriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pBuildInfo, VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT *pSizeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT *pIdentifier, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Definition: core.h:760
void getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, VULKAN_HPP_NAMESPACE::DeviceSize *pOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
Definition: vulkan.hpp:225
void destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV *pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT *pCallback, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass, VULKAN_HPP_NAMESPACE::Extent2D *pMaxWorkgroupSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices(uint32_t *pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice *pPhysicalDevices, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D *pGranularity, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void() free(VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT *pMultisampleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilReference(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setEvent(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissorWithCount(uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindImageMemory2(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties *pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getToolPropertiesEXT(uint32_t *pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties *pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector< StructureChain > getQueueFamilyProperties2KHR() const
void beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineWidth(float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NAMESPACE
Definition: vulkan.hpp:229
void destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void trimCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDiscardRectangleEXT(uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D *pDiscardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch > >::type createDebugReportCallbackEXTUnique(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthCompareOpEXT(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void(VKAPI_PTR * PFN_vkVoidFunction)(void)
Definition: vulkan_core.h:2918
void getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties *pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLbitfield GLuint64 timeout
Definition: glcorearb.h:1599
DeviceAddress getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR *pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void **ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Semaphore, Dispatch > >::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void bindDescriptorBuffersEXT(uint32_t bufferCount, const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT *pBindingInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 *pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorWriteMaskEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorComponentFlags *pColorWriteMasks, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint32_t size() const VULKAN_HPP_NOEXCEPT
Definition: vulkan.hpp:679
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createRayTracingPipelineKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPass2Unique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D *pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineStippleEXT(uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo *pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ImageView *pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT *pDisplayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< DataType > getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT *pFaultCounts, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT *pFaultInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< VULKAN_HPP_NAMESPACE::SwapchainKHR >::type createSharedSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLint GLsizei GLint GLenum format
Definition: glcorearb.h:108
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch > >::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch >, SwapchainKHRAllocator > >::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
uint32_t getDeferredOperationMaxConcurrencyKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLenum GLboolean GLsizei stride
Definition: glcorearb.h:872
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch > >::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWithCountEXT(uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR *pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *pCompatibility, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
const GLuint * buffers
Definition: glcorearb.h:661
GLsizei const GLenum * attachments
Definition: glcorearb.h:2518
uint32_t getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void executeCommands(uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_INLINE void resultCheck(Result result, char const *message)
Definition: vulkan.hpp:6639
void setRasterizationStreamEXT(uint32_t rasterizationStream, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT &deviceEventInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createRayTracingPipelineNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo *pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t *pDataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 *pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
*get result *(waiting if necessary)*A common idiom is to fire a bunch of sub tasks at the queue
Definition: thread.h:623
void drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint location
Definition: glcorearb.h:805
#define VULKAN_HPP_ASSERT
Definition: vulkan.hpp:70
void endRendering(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT *pCallbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createGraphicsPipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT *pDescriptorInfo, size_t dataSize, void *pDescriptor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetQueryPoolEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, uint32_t *pPropertiesCount, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRayTracingPipelineStackSizeKHR(uint32_t pipelineStackSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch > >::type createPrivateDataSlotEXTUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(uint32_t *pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR *pFragmentShadingRates, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NAMESPACE::CommandPool commandPool
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 *pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass(VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void waitEvents2(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t *pInfoSize, void *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR *pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilOpEXT(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NODISCARD
Definition: vulkan.hpp:224
GLuint const GLchar * name
Definition: glcorearb.h:786
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< std::pair< uint64_t, uint64_t > >::type getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT &timestampInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createRayTracingPipelineNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch > >::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache *pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFrontFaceEXT(VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type resetEvent(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void bindInvocationMaskHUAWEI(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange *pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch > >::type createAccelerationStructureNVUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch > >::type createFramebufferUnique(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo *pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties *pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache *pPipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei samples
Definition: glcorearb.h:1298
void preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV *pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLenum mode
Definition: glcorearb.h:99
GLint GLint GLsizei GLsizei GLsizei depth
Definition: glcorearb.h:476
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch >, CommandBufferAllocator > >::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo &allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV *pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 *pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT *pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDeviceMaskKHR(uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch > >::type acquirePerformanceConfigurationINTELUnique(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL &acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 *pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR *pPresentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderingKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::QueryPool *pQueryPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructuresIndirectKHR(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides, const uint32_t *const *ppMaxPrimitiveCounts, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image *pSwapchainImages, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type writeAccelerationStructuresPropertyKHR(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR > const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR *pInternalRepresentations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizeiptr size
Definition: glcorearb.h:664
void beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::BufferView *pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLenum GLenum dst
Definition: glcorearb.h:1793
void setDepthWriteEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT *pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *pCompatibility, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint shader
Definition: glcorearb.h:785
VULKAN_HPP_NODISCARD Result createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Image *pImage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizeiptr const void GLenum usage
Definition: glcorearb.h:664
void setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV *pCustomSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindImageMemory(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setCoverageToColorLocationNV(uint32_t coverageToColorLocation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV *pOpticalFlowImageFormatInfo, uint32_t *pFormatCount, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBias(float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR *pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(uint32_t *pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NAMESPACE_STRING
Definition: vulkan.hpp:234
VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL *pInitializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLenum GLsizei GLsizei GLint * values
Definition: glcorearb.h:1602
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch > >::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLuint color
Definition: glcorearb.h:1261
void copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
void bindPipeline(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Semaphore *pSemaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 *pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t *pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE *pPresentationTimings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getProperties2() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch > >::type createDisplayModeKHRUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT *pSampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, const VULKAN_HPP_NAMESPACE::SampleMask *pSampleMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearAttachments(uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment *pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect *pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 *pQueueInfo, VULKAN_HPP_NAMESPACE::Queue *pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyFence(VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Instance *pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
void trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint index
Definition: glcorearb.h:786
void destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector< StructureChain > getQueueFamilyProperties2() const
VULKAN_HPP_NODISCARD Result waitForFences(uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence *pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT *pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t *pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV *pCombinations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setColorWriteEnableEXT(uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 *pColorWriteEnables, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VK_HEADER_VERSION
Definition: vulkan_core.h:75
void copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties *pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT *pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Device, Dispatch > >::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyImage(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendEquationEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT *pColorBlendEquations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void *pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV *pMemoryGetRemoteAddressInfo, VULKAN_HPP_NAMESPACE::RemoteAddressNV *pAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, VULKAN_HPP_NAMESPACE::DeviceSize *pLayoutSizeInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceSize getRayTracingShaderGroupStackSizeKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch > >::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX *pFunction, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CommandPool *pCommandPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCullModeEXT(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, uint32_t *pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR *pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLsizei width
Definition: glcorearb.h:103
VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV *pSession, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyEvent(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Definition: core.h:982
VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR(uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 *pSupported, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t VkDeviceSize
Definition: vulkan_core.h:95
void getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type acquireDrmDisplayEXT(int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatchIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPassUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT *pValidationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ImageView, Dispatch > >::type createImageViewUnique(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Event *pEvent, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDeviceMask(uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 *pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch > >::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV *pExternalImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t *pBufferIndices, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers2(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize *pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch > >::type createShaderModuleUnique(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setBlendConstants(const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getCheckpointData2NV(uint32_t *pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV *pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch > >::type createValidationCacheEXTUnique(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setStencilWriteMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindImageMemory2KHR(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDrmDisplayEXT(int32_t drmFd, uint32_t connectorId, VULKAN_HPP_NAMESPACE::DisplayKHR *display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Event, Dispatch > >::type createEventUnique(const VULKAN_HPP_NAMESPACE::EventCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Image, Dispatch > >::type createImageUnique(const VULKAN_HPP_NAMESPACE::ImageCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkQueryResultFlags
Definition: vulkan_core.h:2461
void getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pBuildInfo, const uint32_t *pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR *pSizeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
type
Definition: core.h:1059
void beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectByteCountEXT(uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex, uint32_t *pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR *pCounterDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch > >::type createPipelineLayoutUnique(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory *pMemory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char *pLayerPrefix, const char *pMessage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule *pShaderModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch > >::type createHeadlessSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setCheckpointNV(const void *pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
PFN_vkVoidFunction getProcAddr(const char *pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t VkDeviceAddress
Definition: vulkan_core.h:94
VULKAN_HPP_NODISCARD Result signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo *pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_INLINE ResultValueType< void >::type createResultValueType(Result result)
Definition: vulkan.hpp:6608
VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT *pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildMicromapsEXT(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers *pImageSubresources, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueueFamilyProperties(uint32_t *pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties *pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result submit2KHR(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint64 GLenum GLint fd
Definition: RE_OGL.h:262
void writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Result freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR *pDeferredOperation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindBufferMemory2(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type writeMicromapsPropertyEXT(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::MicromapEXT > const &micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Definition: format.h:895
void endQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void waitEvents(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWithCount(uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissorWithCountEXT(uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent2(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLuint const GLintptr const GLsizei * strides
Definition: glcorearb.h:2625
VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot *pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPatchControlPointsEXT(uint32_t patchControlPoints, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties *pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
uint64_t getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_INLINE
Definition: vulkan.hpp:172
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Instance, Dispatch > >::type createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator, Dispatch const &d)
void destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CommandPool, Dispatch > >::type createCommandPoolUnique(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLint GLint GLint GLint GLint GLint GLbitfield GLenum filter
Definition: glcorearb.h:1297
GLenum src
Definition: glcorearb.h:1793
void setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setEvent2(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT