HDK
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
vulkan.hpp
Go to the documentation of this file.
1 // Copyright 2015-2022 The Khronos Group Inc.
2 //
3 // SPDX-License-Identifier: Apache-2.0 OR MIT
4 //
5 
6 // This header is generated from the Khronos Vulkan XML API Registry.
7 
8 #ifndef VULKAN_HPP
9 #define VULKAN_HPP
10 
11 #if defined( _MSVC_LANG )
12 # define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
13 #else
14 # define VULKAN_HPP_CPLUSPLUS __cplusplus
15 #endif
16 
17 #if 201703L < VULKAN_HPP_CPLUSPLUS
18 # define VULKAN_HPP_CPP_VERSION 20
19 #elif 201402L < VULKAN_HPP_CPLUSPLUS
20 # define VULKAN_HPP_CPP_VERSION 17
21 #elif 201103L < VULKAN_HPP_CPLUSPLUS
22 # define VULKAN_HPP_CPP_VERSION 14
23 #elif 199711L < VULKAN_HPP_CPLUSPLUS
24 # define VULKAN_HPP_CPP_VERSION 11
25 #else
26 # error "vulkan.hpp needs at least c++ standard version 11"
27 #endif
28 
29 #include <algorithm>
30 #include <array> // ArrayWrapperND
31 #include <string> // std::string
32 #include <vulkan/vulkan.h>
33 #if 17 <= VULKAN_HPP_CPP_VERSION
34 # include <string_view> // std::string_view
35 #endif
36 
37 #if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
38 # if !defined( VULKAN_HPP_NO_SMART_HANDLE )
39 # define VULKAN_HPP_NO_SMART_HANDLE
40 # endif
41 #else
42 # include <tuple> // std::tie
43 # include <vector> // std::vector
44 #endif
45 
46 #if !defined( VULKAN_HPP_NO_EXCEPTIONS )
47 # include <system_error> // std::is_error_code_enum
48 #endif
49 
50 #if defined( VULKAN_HPP_NO_CONSTRUCTORS )
51 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
52 # define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
53 # endif
54 # if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
55 # define VULKAN_HPP_NO_UNION_CONSTRUCTORS
56 # endif
57 #endif
58 
59 #if defined( VULKAN_HPP_NO_SETTERS )
60 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
61 # define VULKAN_HPP_NO_STRUCT_SETTERS
62 # endif
63 # if !defined( VULKAN_HPP_NO_UNION_SETTERS )
64 # define VULKAN_HPP_NO_UNION_SETTERS
65 # endif
66 #endif
67 
68 #if !defined( VULKAN_HPP_ASSERT )
69 # include <cassert>
70 # define VULKAN_HPP_ASSERT assert
71 #endif
72 
73 #if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
74 # define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
75 #endif
76 
77 #if !defined( VULKAN_HPP_STATIC_ASSERT )
78 # define VULKAN_HPP_STATIC_ASSERT static_assert
79 #endif
80 
81 #if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
82 # define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
83 #endif
84 
85 #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
86 # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
87 # include <dlfcn.h>
88 # elif defined( _WIN32 )
89 typedef struct HINSTANCE__ * HINSTANCE;
90 # if defined( _WIN64 )
91 typedef int64_t( __stdcall * FARPROC )();
92 # else
93 typedef int( __stdcall * FARPROC )();
94 # endif
95 extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
96 extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
97 extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
98 # endif
99 #endif
100 
101 #if !defined( __has_include )
102 # define __has_include( x ) false
103 #endif
104 
105 #if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
106 # define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
107 #endif
108 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
109 # include <compare>
110 #endif
111 
112 #if ( 201803 <= __cpp_lib_span )
113 # define VULKAN_HPP_SUPPORT_SPAN
114 # include <span>
115 #endif
116 
117 static_assert( VK_HEADER_VERSION == 236, "Wrong VK_HEADER_VERSION!" );
118 
119 // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
120 // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
121 #if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
122 # if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
123 # define VULKAN_HPP_TYPESAFE_CONVERSION
124 # endif
125 #endif
126 
127 // <tuple> includes <sys/sysmacros.h> through some other header
128 // this results in major(x) being resolved to gnu_dev_major(x)
129 // which is an expression in a constructor initializer list.
130 #if defined( major )
131 # undef major
132 #endif
133 #if defined( minor )
134 # undef minor
135 #endif
136 
137 // Windows defines MemoryBarrier which is deprecated and collides
138 // with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
139 #if defined( MemoryBarrier )
140 # undef MemoryBarrier
141 #endif
142 
143 #if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
144 # if defined( __clang__ )
145 # if __has_feature( cxx_unrestricted_unions )
146 # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
147 # endif
148 # elif defined( __GNUC__ )
149 # define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
150 # if 40600 <= GCC_VERSION
151 # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
152 # endif
153 # elif defined( _MSC_VER )
154 # if 1900 <= _MSC_VER
155 # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
156 # endif
157 # endif
158 #endif
159 
160 #if !defined( VULKAN_HPP_INLINE )
161 # if defined( __clang__ )
162 # if __has_attribute( always_inline )
163 # define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
164 # else
165 # define VULKAN_HPP_INLINE inline
166 # endif
167 # elif defined( __GNUC__ )
168 # define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
169 # elif defined( _MSC_VER )
170 # define VULKAN_HPP_INLINE inline
171 # else
172 # define VULKAN_HPP_INLINE inline
173 # endif
174 #endif
175 
176 #if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
177 # define VULKAN_HPP_TYPESAFE_EXPLICIT
178 #else
179 # define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
180 #endif
181 
182 #if defined( __cpp_constexpr )
183 # define VULKAN_HPP_CONSTEXPR constexpr
184 # if __cpp_constexpr >= 201304
185 # define VULKAN_HPP_CONSTEXPR_14 constexpr
186 # else
187 # define VULKAN_HPP_CONSTEXPR_14
188 # endif
189 # define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
190 #else
191 # define VULKAN_HPP_CONSTEXPR
192 # define VULKAN_HPP_CONSTEXPR_14
193 # define VULKAN_HPP_CONST_OR_CONSTEXPR const
194 #endif
195 
196 #if !defined( VULKAN_HPP_NOEXCEPT )
197 # if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
198 # define VULKAN_HPP_NOEXCEPT
199 # else
200 # define VULKAN_HPP_NOEXCEPT noexcept
201 # define VULKAN_HPP_HAS_NOEXCEPT 1
202 # if defined( VULKAN_HPP_NO_EXCEPTIONS )
203 # define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
204 # else
205 # define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
206 # endif
207 # endif
208 #endif
209 
210 #if 14 <= VULKAN_HPP_CPP_VERSION
211 # define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
212 #else
213 # define VULKAN_HPP_DEPRECATED( msg )
214 #endif
215 
216 #if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
217 # define VULKAN_HPP_NODISCARD [[nodiscard]]
218 # if defined( VULKAN_HPP_NO_EXCEPTIONS )
219 # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
220 # else
221 # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
222 # endif
223 #else
224 # define VULKAN_HPP_NODISCARD
225 # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
226 #endif
227 
228 #if !defined( VULKAN_HPP_NAMESPACE )
229 # define VULKAN_HPP_NAMESPACE vk
230 #endif
231 
232 #define VULKAN_HPP_STRINGIFY2( text ) #text
233 #define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text )
234 #define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
235 
237 {
238  template <typename T, size_t N>
239  class ArrayWrapper1D : public std::array<T, N>
240  {
241  public:
243 
244  VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {}
245 
246 #if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
247  // on 32 bit compiles, needs overloads on index type int to resolve ambiguities
249  {
250  return std::array<T, N>::operator[]( index );
251  }
252 
254  {
255  return std::array<T, N>::operator[]( index );
256  }
257 #endif
258 
259  operator T const *() const VULKAN_HPP_NOEXCEPT
260  {
261  return this->data();
262  }
263 
264  operator T *() VULKAN_HPP_NOEXCEPT
265  {
266  return this->data();
267  }
268 
270  operator std::string() const
271  {
272  return std::string( this->data() );
273  }
274 
275 #if 17 <= VULKAN_HPP_CPP_VERSION
277  operator std::string_view() const
278  {
279  return std::string_view( this->data() );
280  }
281 #endif
282 
283 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
285  std::strong_ordering operator<=>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
286  {
287  return *static_cast<std::array<char, N> const *>( this ) <=> *static_cast<std::array<char, N> const *>( &rhs );
288  }
289 #else
291  bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
292  {
293  return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
294  }
295 
297  bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
298  {
299  return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
300  }
301 
304  {
305  return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
306  }
307 
310  {
311  return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
312  }
313 #endif
314 
317  {
318  return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
319  }
320 
323  {
324  return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
325  }
326  };
327 
328  // specialization of relational operators between std::string and arrays of chars
329  template <size_t N>
330  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
331  {
332  return lhs < rhs.data();
333  }
334 
335  template <size_t N>
336  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
337  {
338  return lhs <= rhs.data();
339  }
340 
341  template <size_t N>
343  {
344  return lhs > rhs.data();
345  }
346 
347  template <size_t N>
349  {
350  return lhs >= rhs.data();
351  }
352 
353  template <size_t N>
355  {
356  return lhs == rhs.data();
357  }
358 
359  template <size_t N>
361  {
362  return lhs != rhs.data();
363  }
364 
365  template <typename T, size_t N, size_t M>
366  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
367  {
368  public:
370 
371  VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
372  : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
373  {
374  }
375  };
376 
377  template <typename FlagBitsType>
378  struct FlagTraits
379  {
381  };
382 
383  template <typename BitType>
384  class Flags
385  {
386  public:
388 
389  // constructors
391 
392  VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {}
393 
395 
397 
398  // relational operators
399 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
400  auto operator<=>( Flags<BitType> const & ) const = default;
401 #else
402  VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
403  {
404  return m_mask < rhs.m_mask;
405  }
406 
407  VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
408  {
409  return m_mask <= rhs.m_mask;
410  }
411 
413  {
414  return m_mask > rhs.m_mask;
415  }
416 
418  {
419  return m_mask >= rhs.m_mask;
420  }
421 
423  {
424  return m_mask == rhs.m_mask;
425  }
426 
428  {
429  return m_mask != rhs.m_mask;
430  }
431 #endif
432 
433  // logical operator
435  {
436  return !m_mask;
437  }
438 
439  // bitwise operators
441  {
442  return Flags<BitType>( m_mask & rhs.m_mask );
443  }
444 
446  {
447  return Flags<BitType>( m_mask | rhs.m_mask );
448  }
449 
451  {
452  return Flags<BitType>( m_mask ^ rhs.m_mask );
453  }
454 
456  {
457  return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags.m_mask );
458  }
459 
460  // assignment operators
462 
464  {
465  m_mask |= rhs.m_mask;
466  return *this;
467  }
468 
470  {
471  m_mask &= rhs.m_mask;
472  return *this;
473  }
474 
476  {
477  m_mask ^= rhs.m_mask;
478  return *this;
479  }
480 
481  // cast operators
483  {
484  return !!m_mask;
485  }
486 
488  {
489  return m_mask;
490  }
491 
492 #if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
493  public:
494 #else
495  private:
496 #endif
497  MaskType m_mask;
498  };
499 
500 #if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
501  // relational operators only needed for pre C++20
502  template <typename BitType>
503  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
504  {
505  return flags.operator>( bit );
506  }
507 
508  template <typename BitType>
509  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
510  {
511  return flags.operator>=( bit );
512  }
513 
514  template <typename BitType>
516  {
517  return flags.operator<( bit );
518  }
519 
520  template <typename BitType>
522  {
523  return flags.operator<=( bit );
524  }
525 
526  template <typename BitType>
528  {
529  return flags.operator==( bit );
530  }
531 
532  template <typename BitType>
534  {
535  return flags.operator!=( bit );
536  }
537 #endif
538 
539  // bitwise operators
540  template <typename BitType>
542  {
543  return flags.operator&( bit );
544  }
545 
546  template <typename BitType>
548  {
549  return flags.operator|( bit );
550  }
551 
552  template <typename BitType>
554  {
555  return flags.operator^( bit );
556  }
557 
558  // bitwise operators on BitType
559  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
561  {
562  return Flags<BitType>( lhs ) & rhs;
563  }
564 
565  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
567  {
568  return Flags<BitType>( lhs ) | rhs;
569  }
570 
571  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
573  {
574  return Flags<BitType>( lhs ) ^ rhs;
575  }
576 
577  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
579  {
580  return ~( Flags<BitType>( bit ) );
581  }
582 
583 #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
584  template <typename T>
586  {
587  public:
589  : m_count( 0 )
590  , m_ptr( nullptr )
591  {
592  }
593 
595  : m_count( 0 )
596  , m_ptr( nullptr )
597  {
598  }
599 
601  : m_count( 1 )
602  , m_ptr( &value )
603  {
604  }
605 
606  ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT
607  : m_count( count )
608  , m_ptr( ptr )
609  {
610  }
611 
612  template <std::size_t C>
613  ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
614  : m_count( C )
615  , m_ptr( ptr )
616  {
617  }
618 
619 # if __GNUC__ >= 9
620 # pragma GCC diagnostic push
621 # pragma GCC diagnostic ignored "-Winit-list-lifetime"
622 # endif
623 
624  ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
625  : m_count( static_cast<uint32_t>( list.size() ) )
626  , m_ptr( list.begin() )
627  {
628  }
629 
631  ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
632  : m_count( static_cast<uint32_t>( list.size() ) )
633  , m_ptr( list.begin() )
634  {
635  }
636 
637 # if __GNUC__ >= 9
638 # pragma GCC diagnostic pop
639 # endif
640 
641  // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
642  // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement.
643  template <typename V,
644  typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
645  std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
647  : m_count( static_cast<uint32_t>( v.size() ) )
648  , m_ptr( v.data() )
649  {
650  }
651 
653  {
654  return m_ptr;
655  }
656 
658  {
659  return m_ptr + m_count;
660  }
661 
663  {
664  VULKAN_HPP_ASSERT( m_count && m_ptr );
665  return *m_ptr;
666  }
667 
669  {
670  VULKAN_HPP_ASSERT( m_count && m_ptr );
671  return *( m_ptr + m_count - 1 );
672  }
673 
675  {
676  return ( m_count == 0 );
677  }
678 
680  {
681  return m_count;
682  }
683 
685  {
686  return m_ptr;
687  }
688 
689  private:
690  uint32_t m_count;
691  T const * m_ptr;
692  };
693 
694  template <typename T>
696  {
697  public:
699  : m_count( 0 )
700  , m_ptr( nullptr )
701  {
702  }
703 
705  : m_count( 0 )
706  , m_ptr( nullptr )
707  {
708  }
709 
711  : m_count( 1 )
712  , m_ptr( &value )
713  {
714  }
715 
716  template <typename V>
717  ArrayProxyNoTemporaries( V && value ) = delete;
718 
721  : m_count( 1 )
722  , m_ptr( &value )
723  {
724  }
725 
728 
730  : m_count( count )
731  , m_ptr( ptr )
732  {
733  }
734 
737  : m_count( count )
738  , m_ptr( ptr )
739  {
740  }
741 
742  template <std::size_t C>
744  : m_count( C )
745  , m_ptr( ptr )
746  {
747  }
748 
749  template <std::size_t C>
750  ArrayProxyNoTemporaries( T( &&ptr )[C] ) = delete;
751 
754  : m_count( C )
755  , m_ptr( ptr )
756  {
757  }
758 
760  ArrayProxyNoTemporaries( typename std::remove_const<T>::type( &&ptr )[C] ) = delete;
761 
762  ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
763  : m_count( static_cast<uint32_t>( list.size() ) )
764  , m_ptr( list.begin() )
765  {
766  }
767 
768  ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
769 
771  ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
772  : m_count( static_cast<uint32_t>( list.size() ) )
773  , m_ptr( list.begin() )
774  {
775  }
776 
778  ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
779 
780  ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
781  : m_count( static_cast<uint32_t>( list.size() ) )
782  , m_ptr( list.begin() )
783  {
784  }
785 
786  ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
787 
790  : m_count( static_cast<uint32_t>( list.size() ) )
791  , m_ptr( list.begin() )
792  {
793  }
794 
796  ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
797 
798  // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t.
799  template <typename V,
800  typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
801  std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
803  : m_count( static_cast<uint32_t>( v.size() ) )
804  , m_ptr( v.data() )
805  {
806  }
807 
809  {
810  return m_ptr;
811  }
812 
814  {
815  return m_ptr + m_count;
816  }
817 
819  {
820  VULKAN_HPP_ASSERT( m_count && m_ptr );
821  return *m_ptr;
822  }
823 
825  {
826  VULKAN_HPP_ASSERT( m_count && m_ptr );
827  return *( m_ptr + m_count - 1 );
828  }
829 
831  {
832  return ( m_count == 0 );
833  }
834 
836  {
837  return m_count;
838  }
839 
841  {
842  return m_ptr;
843  }
844 
845  private:
846  uint32_t m_count;
847  T * m_ptr;
848  };
849 
850  template <typename T>
852  {
853  public:
855 
856  StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT
857  : ArrayProxy<T>( count, ptr )
858  , m_stride( stride )
859  {
860  VULKAN_HPP_ASSERT( sizeof( T ) <= stride );
861  }
862 
863  using ArrayProxy<T>::begin;
864 
866  {
867  return reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + size() * m_stride );
868  }
869 
870  using ArrayProxy<T>::front;
871 
873  {
874  VULKAN_HPP_ASSERT( begin() && size() );
875  return *reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride );
876  }
877 
878  using ArrayProxy<T>::empty;
879  using ArrayProxy<T>::size;
880  using ArrayProxy<T>::data;
881 
882  uint32_t stride() const
883  {
884  return m_stride;
885  }
886 
887  private:
888  uint32_t m_stride = sizeof( T );
889  };
890 
891  template <typename RefType>
892  class Optional
893  {
894  public:
895  Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT
896  {
897  m_ptr = &reference;
898  }
900  {
901  m_ptr = ptr;
902  }
903  Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
904  {
905  m_ptr = nullptr;
906  }
907 
908  operator RefType *() const VULKAN_HPP_NOEXCEPT
909  {
910  return m_ptr;
911  }
913  {
914  return m_ptr;
915  }
916  explicit operator bool() const VULKAN_HPP_NOEXCEPT
917  {
918  return !!m_ptr;
919  }
920 
921  private:
922  RefType * m_ptr;
923  };
924 
925  template <typename X, typename Y>
927  {
928  enum
929  {
930  value = false
931  };
932  };
933 
934  template <typename Type, class...>
936  {
937  static const bool valid = false;
938  };
939 
940  template <typename Type, typename Head, typename... Tail>
941  struct IsPartOfStructureChain<Type, Head, Tail...>
942  {
944  };
945 
946  template <size_t Index, typename T, typename... ChainElements>
948  {
949  static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
950  StructureChainContains<Index - 1, T, ChainElements...>::value;
951  };
952 
953  template <typename T, typename... ChainElements>
954  struct StructureChainContains<0, T, ChainElements...>
955  {
956  static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
957  };
958 
959  template <size_t Index, typename... ChainElements>
961  {
962  using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
963  static const bool valid = StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
964  ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
966  };
967 
968  template <typename... ChainElements>
969  struct StructureChainValidation<0, ChainElements...>
970  {
971  static const bool valid = true;
972  };
973 
974  template <typename... ChainElements>
975  class StructureChain : public std::tuple<ChainElements...>
976  {
977  public:
979  {
980  static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
981  link<sizeof...( ChainElements ) - 1>();
982  }
983 
984  StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
985  {
986  static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
987  link( &std::get<0>( *this ),
988  &std::get<0>( rhs ),
989  reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
990  reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
991  }
992 
993  StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
994  {
995  static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
996  link( &std::get<0>( *this ),
997  &std::get<0>( rhs ),
998  reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
999  reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
1000  }
1001 
1002  StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
1003  {
1004  static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
1005  link<sizeof...( ChainElements ) - 1>();
1006  }
1007 
1009  {
1010  std::tuple<ChainElements...>::operator=( rhs );
1011  link( &std::get<0>( *this ),
1012  &std::get<0>( rhs ),
1013  reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
1014  reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
1015  return *this;
1016  }
1017 
1018  StructureChain & operator=( StructureChain && rhs ) = delete;
1019 
1020  template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
1022  {
1023  return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> &>( *this ) );
1024  }
1025 
1026  template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
1027  T const & get() const VULKAN_HPP_NOEXCEPT
1028  {
1029  return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) );
1030  }
1031 
1032  template <typename T0, typename T1, typename... Ts>
1033  std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
1034  {
1035  return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
1036  }
1037 
1038  template <typename T0, typename T1, typename... Ts>
1039  std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
1040  {
1041  return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
1042  }
1043 
1044  template <typename ClassType, size_t Which = 0>
1045  typename std::enable_if<std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && ( Which == 0 ), bool>::type
1047  {
1048  return true;
1049  }
1050 
1051  template <typename ClassType, size_t Which = 0>
1052  typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), bool>::type
1054  {
1055  static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" );
1056  return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
1057  }
1058 
1059  template <typename ClassType, size_t Which = 0>
1060  typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type
1062  {
1063  static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't relink Structure that's not part of this StructureChain!" );
1064  auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
1065  VULKAN_HPP_ASSERT( !isLinked( pNext ) );
1066  auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
1067  pNext->pNext = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext );
1068  headElement.pNext = pNext;
1069  }
1070 
1071  template <typename ClassType, size_t Which = 0>
1072  typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type
1074  {
1075  static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" );
1076  unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
1077  }
1078 
1079  private:
1080  template <int Index, typename T, int Which, typename, class First, class... Types>
1081  struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
1082  {
1083  };
1084 
1085  template <int Index, typename T, int Which, class First, class... Types>
1086  struct ChainElementIndex<Index, T, Which, typename std::enable_if<!std::is_same<T, First>::value, void>::type, First, Types...>
1087  : ChainElementIndex<Index + 1, T, Which, void, Types...>
1088  {
1089  };
1090 
1091  template <int Index, typename T, int Which, class First, class... Types>
1092  struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
1093  : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
1094  {
1095  };
1096 
1097  template <int Index, typename T, class First, class... Types>
1098  struct ChainElementIndex<Index, T, 0, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
1099  : std::integral_constant<int, Index>
1100  {
1101  };
1102 
1103  bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
1104  {
1105  VkBaseInStructure const * elementPtr =
1106  reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
1107  while ( elementPtr )
1108  {
1109  if ( elementPtr->pNext == pNext )
1110  {
1111  return true;
1112  }
1113  elementPtr = elementPtr->pNext;
1114  }
1115  return false;
1116  }
1117 
1118  template <size_t Index>
1120  {
1121  auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) );
1122  x.pNext = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) );
1123  link<Index - 1>();
1124  }
1125 
1126  template <size_t Index>
1128  {
1129  }
1130 
1131  void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src )
1132  {
1133  while ( src->pNext )
1134  {
1135  std::ptrdiff_t offset = reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase );
1136  dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset );
1137  dst = dst->pNext;
1138  src = src->pNext;
1139  }
1140  dst->pNext = nullptr;
1141  }
1142 
1143  void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
1144  {
1145  VkBaseOutStructure * elementPtr = reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
1146  while ( elementPtr && ( elementPtr->pNext != pNext ) )
1147  {
1148  elementPtr = elementPtr->pNext;
1149  }
1150  if ( elementPtr )
1151  {
1152  elementPtr->pNext = pNext->pNext;
1153  }
1154  else
1155  {
1156  VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked !
1157  }
1158  }
1159  };
1160 
1161 # if !defined( VULKAN_HPP_NO_SMART_HANDLE )
1162  template <typename Type, typename Dispatch>
1164 
1165  template <typename Type, typename Dispatch>
1166  class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
1167  {
1168  private:
1169  using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;
1170 
1171  public:
1173 
1174  UniqueHandle() : Deleter(), m_value() {}
1175 
1176  explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
1177  : Deleter( deleter )
1178  , m_value( value )
1179  {
1180  }
1181 
1182  UniqueHandle( UniqueHandle const & ) = delete;
1183 
1185  : Deleter( std::move( static_cast<Deleter &>( other ) ) )
1186  , m_value( other.release() )
1187  {
1188  }
1189 
1191  {
1192  if ( m_value )
1193  {
1194  this->destroy( m_value );
1195  }
1196  }
1197 
1198  UniqueHandle & operator=( UniqueHandle const & ) = delete;
1199 
1201  {
1202  reset( other.release() );
1203  *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) );
1204  return *this;
1205  }
1206 
1207  explicit operator bool() const VULKAN_HPP_NOEXCEPT
1208  {
1209  return m_value.operator bool();
1210  }
1211 
1213  {
1214  return &m_value;
1215  }
1216 
1218  {
1219  return &m_value;
1220  }
1221 
1223  {
1224  return m_value;
1225  }
1226 
1228  {
1229  return m_value;
1230  }
1231 
1232  const Type & get() const VULKAN_HPP_NOEXCEPT
1233  {
1234  return m_value;
1235  }
1236 
1238  {
1239  return m_value;
1240  }
1241 
1242  void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT
1243  {
1244  if ( m_value != value )
1245  {
1246  if ( m_value )
1247  {
1248  this->destroy( m_value );
1249  }
1250  m_value = value;
1251  }
1252  }
1253 
1255  {
1256  Type value = m_value;
1257  m_value = nullptr;
1258  return value;
1259  }
1260 
1262  {
1263  std::swap( m_value, rhs.m_value );
1264  std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
1265  }
1266 
1267  private:
1268  Type m_value;
1269  };
1270 
1271  template <typename UniqueType>
1272  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw( std::vector<UniqueType> const & handles )
1273  {
1274  std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
1275  std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
1276  return newBuffer;
1277  }
1278 
1279  template <typename Type, typename Dispatch>
1281  {
1282  lhs.swap( rhs );
1283  }
1284 # endif
1285 #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
1286 
1288  {
1289  public:
1290  DispatchLoaderBase() = default;
1291  DispatchLoaderBase( std::nullptr_t )
1292 #if !defined( NDEBUG )
1293  : m_valid( false )
1294 #endif
1295  {
1296  }
1297 
1298 #if !defined( NDEBUG )
1299  size_t getVkHeaderVersion() const
1300  {
1301  VULKAN_HPP_ASSERT( m_valid );
1302  return vkHeaderVersion;
1303  }
1304 
1305  private:
1306  size_t vkHeaderVersion = VK_HEADER_VERSION;
1307  bool m_valid = true;
1308 #endif
1309  };
1310 
1311 #if !defined( VK_NO_PROTOTYPES )
1313  {
1314  public:
1315  //=== VK_VERSION_1_0 ===
1316 
    // Static dispatch: forwards directly to the vkCreateInstance entry point
    // exported by the statically linked Vulkan loader.
    VkResult
      vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
    }
1322 
1323  void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1324  {
1325  return ::vkDestroyInstance( instance, pAllocator );
1326  }
1327 
1328  VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
1329  {
1330  return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
1331  }
1332 
1333  void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
1334  {
1335  return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
1336  }
1337 
1338  void
1339  vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
1340  {
1341  return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
1342  }
1343 
1344  VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice,
1345  VkFormat format,
1346  VkImageType type,
1347  VkImageTiling tiling,
1350  VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
1351  {
1352  return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
1353  }
1354 
1355  void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
1356  {
1357  return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
1358  }
1359 
1360  void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice,
1361  uint32_t * pQueueFamilyPropertyCount,
1362  VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
1363  {
1364  return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
1365  }
1366 
1367  void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
1368  {
1369  return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
1370  }
1371 
1372  PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT
1373  {
1374  return ::vkGetInstanceProcAddr( instance, pName );
1375  }
1376 
1377  PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT
1378  {
1379  return ::vkGetDeviceProcAddr( device, pName );
1380  }
1381 
1382  VkResult vkCreateDevice( VkPhysicalDevice physicalDevice,
1383  const VkDeviceCreateInfo * pCreateInfo,
1384  const VkAllocationCallbacks * pAllocator,
1385  VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT
1386  {
1387  return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
1388  }
1389 
1390  void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1391  {
1392  return ::vkDestroyDevice( device, pAllocator );
1393  }
1394 
1396  uint32_t * pPropertyCount,
1397  VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
1398  {
1399  return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
1400  }
1401 
1402  VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice,
1403  const char * pLayerName,
1404  uint32_t * pPropertyCount,
1405  VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
1406  {
1407  return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
1408  }
1409 
1410  VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
1411  {
1412  return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
1413  }
1414 
1415  VkResult
1416  vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
1417  {
1418  return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
1419  }
1420 
1421  void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
1422  {
1423  return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
1424  }
1425 
1426  VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
1427  {
1428  return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
1429  }
1430 
1432  {
1434  }
1435 
1437  {
1438  return ::vkDeviceWaitIdle( device );
1439  }
1440 
1441  VkResult vkAllocateMemory( VkDevice device,
1442  const VkMemoryAllocateInfo * pAllocateInfo,
1443  const VkAllocationCallbacks * pAllocator,
1444  VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT
1445  {
1446  return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
1447  }
1448 
1449  void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1450  {
1451  return ::vkFreeMemory( device, memory, pAllocator );
1452  }
1453 
1454  VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const
1456  {
1457  return ::vkMapMemory( device, memory, offset, size, flags, ppData );
1458  }
1459 
1460  void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
1461  {
1462  return ::vkUnmapMemory( device, memory );
1463  }
1464 
1465  VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
1466  {
1467  return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
1468  }
1469 
1470  VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
1471  {
1472  return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
1473  }
1474 
1475  void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
1476  {
1477  return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
1478  }
1479 
1480  VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
1481  {
1482  return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
1483  }
1484 
1485  VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
1486  {
1487  return ::vkBindImageMemory( device, image, memory, memoryOffset );
1488  }
1489 
1490  void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
1491  {
1492  return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
1493  }
1494 
1495  void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
1496  {
1497  return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
1498  }
1499 
1500  void vkGetImageSparseMemoryRequirements( VkDevice device,
1501  VkImage image,
1502  uint32_t * pSparseMemoryRequirementCount,
1503  VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
1504  {
1505  return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
1506  }
1507 
1508  void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice,
1509  VkFormat format,
1510  VkImageType type,
1513  VkImageTiling tiling,
1514  uint32_t * pPropertyCount,
1516  {
1517  return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
1518  }
1519 
1520  VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
1521  {
1522  return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
1523  }
1524 
1525  VkResult vkCreateFence( VkDevice device,
1526  const VkFenceCreateInfo * pCreateInfo,
1527  const VkAllocationCallbacks * pAllocator,
1528  VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
1529  {
1530  return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
1531  }
1532 
1533  void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1534  {
1535  return ::vkDestroyFence( device, fence, pAllocator );
1536  }
1537 
1538  VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT
1539  {
1540  return ::vkResetFences( device, fenceCount, pFences );
1541  }
1542 
1543  VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
1544  {
1545  return ::vkGetFenceStatus( device, fence );
1546  }
1547 
1548  VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
1549  {
1550  return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
1551  }
1552 
1553  VkResult vkCreateSemaphore( VkDevice device,
1554  const VkSemaphoreCreateInfo * pCreateInfo,
1555  const VkAllocationCallbacks * pAllocator,
1556  VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT
1557  {
1558  return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
1559  }
1560 
1561  void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1562  {
1563  return ::vkDestroySemaphore( device, semaphore, pAllocator );
1564  }
1565 
1566  VkResult vkCreateEvent( VkDevice device,
1567  const VkEventCreateInfo * pCreateInfo,
1568  const VkAllocationCallbacks * pAllocator,
1569  VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT
1570  {
1571  return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
1572  }
1573 
1574  void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1575  {
1576  return ::vkDestroyEvent( device, event, pAllocator );
1577  }
1578 
1579  VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
1580  {
1581  return ::vkGetEventStatus( device, event );
1582  }
1583 
1584  VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
1585  {
1586  return ::vkSetEvent( device, event );
1587  }
1588 
1589  VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
1590  {
1591  return ::vkResetEvent( device, event );
1592  }
1593 
1594  VkResult vkCreateQueryPool( VkDevice device,
1595  const VkQueryPoolCreateInfo * pCreateInfo,
1596  const VkAllocationCallbacks * pAllocator,
1597  VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT
1598  {
1599  return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
1600  }
1601 
1602  void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1603  {
1604  return ::vkDestroyQueryPool( device, queryPool, pAllocator );
1605  }
1606 
1608  VkQueryPool queryPool,
1609  uint32_t firstQuery,
1610  uint32_t queryCount,
1611  size_t dataSize,
1612  void * pData,
1615  {
1616  return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
1617  }
1618 
1619  VkResult vkCreateBuffer( VkDevice device,
1620  const VkBufferCreateInfo * pCreateInfo,
1621  const VkAllocationCallbacks * pAllocator,
1622  VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT
1623  {
1624  return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
1625  }
1626 
1627  void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1628  {
1629  return ::vkDestroyBuffer( device, buffer, pAllocator );
1630  }
1631 
1632  VkResult vkCreateBufferView( VkDevice device,
1633  const VkBufferViewCreateInfo * pCreateInfo,
1634  const VkAllocationCallbacks * pAllocator,
1635  VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT
1636  {
1637  return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
1638  }
1639 
1640  void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1641  {
1642  return ::vkDestroyBufferView( device, bufferView, pAllocator );
1643  }
1644 
1645  VkResult vkCreateImage( VkDevice device,
1646  const VkImageCreateInfo * pCreateInfo,
1647  const VkAllocationCallbacks * pAllocator,
1648  VkImage * pImage ) const VULKAN_HPP_NOEXCEPT
1649  {
1650  return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
1651  }
1652 
1653  void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1654  {
1655  return ::vkDestroyImage( device, image, pAllocator );
1656  }
1657 
1658  void vkGetImageSubresourceLayout( VkDevice device,
1659  VkImage image,
1660  const VkImageSubresource * pSubresource,
1661  VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT
1662  {
1663  return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
1664  }
1665 
1666  VkResult vkCreateImageView( VkDevice device,
1667  const VkImageViewCreateInfo * pCreateInfo,
1668  const VkAllocationCallbacks * pAllocator,
1669  VkImageView * pView ) const VULKAN_HPP_NOEXCEPT
1670  {
1671  return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
1672  }
1673 
1674  void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1675  {
1676  return ::vkDestroyImageView( device, imageView, pAllocator );
1677  }
1678 
1679  VkResult vkCreateShaderModule( VkDevice device,
1680  const VkShaderModuleCreateInfo * pCreateInfo,
1681  const VkAllocationCallbacks * pAllocator,
1682  VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT
1683  {
1684  return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
1685  }
1686 
1687  void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1688  {
1689  return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
1690  }
1691 
1693  const VkPipelineCacheCreateInfo * pCreateInfo,
1694  const VkAllocationCallbacks * pAllocator,
1695  VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT
1696  {
1697  return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
1698  }
1699 
1700  void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1701  {
1702  return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
1703  }
1704 
1705  VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
1706  {
1707  return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
1708  }
1709 
1710  VkResult
1711  vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
1712  {
1713  return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
1714  }
1715 
1717  VkPipelineCache pipelineCache,
1718  uint32_t createInfoCount,
1719  const VkGraphicsPipelineCreateInfo * pCreateInfos,
1720  const VkAllocationCallbacks * pAllocator,
1721  VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
1722  {
1723  return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
1724  }
1725 
1727  VkPipelineCache pipelineCache,
1728  uint32_t createInfoCount,
1729  const VkComputePipelineCreateInfo * pCreateInfos,
1730  const VkAllocationCallbacks * pAllocator,
1731  VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
1732  {
1733  return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
1734  }
1735 
1736  void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1737  {
1738  return ::vkDestroyPipeline( device, pipeline, pAllocator );
1739  }
1740 
1742  const VkPipelineLayoutCreateInfo * pCreateInfo,
1743  const VkAllocationCallbacks * pAllocator,
1744  VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
1745  {
1746  return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
1747  }
1748 
1749  void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1750  {
1751  return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
1752  }
1753 
1754  VkResult vkCreateSampler( VkDevice device,
1755  const VkSamplerCreateInfo * pCreateInfo,
1756  const VkAllocationCallbacks * pAllocator,
1757  VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT
1758  {
1759  return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
1760  }
1761 
1762  void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1763  {
1764  return ::vkDestroySampler( device, sampler, pAllocator );
1765  }
1766 
1768  const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
1769  const VkAllocationCallbacks * pAllocator,
1770  VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT
1771  {
1772  return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
1773  }
1774 
1775  void vkDestroyDescriptorSetLayout( VkDevice device,
1776  VkDescriptorSetLayout descriptorSetLayout,
1777  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1778  {
1779  return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
1780  }
1781 
1783  const VkDescriptorPoolCreateInfo * pCreateInfo,
1784  const VkAllocationCallbacks * pAllocator,
1785  VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
1786  {
1787  return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
1788  }
1789 
1790  void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1791  {
1792  return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
1793  }
1794 
1795  VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
1796  {
1797  return ::vkResetDescriptorPool( device, descriptorPool, flags );
1798  }
1799 
1801  const VkDescriptorSetAllocateInfo * pAllocateInfo,
1802  VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
1803  {
1804  return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
1805  }
1806 
1807  VkResult vkFreeDescriptorSets( VkDevice device,
1808  VkDescriptorPool descriptorPool,
1809  uint32_t descriptorSetCount,
1810  const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
1811  {
1812  return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
1813  }
1814 
1815  void vkUpdateDescriptorSets( VkDevice device,
1816  uint32_t descriptorWriteCount,
1817  const VkWriteDescriptorSet * pDescriptorWrites,
1818  uint32_t descriptorCopyCount,
1819  const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
1820  {
1821  return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
1822  }
1823 
1824  VkResult vkCreateFramebuffer( VkDevice device,
1825  const VkFramebufferCreateInfo * pCreateInfo,
1826  const VkAllocationCallbacks * pAllocator,
1827  VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT
1828  {
1829  return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
1830  }
1831 
1832  void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1833  {
1834  return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
1835  }
1836 
1837  VkResult vkCreateRenderPass( VkDevice device,
1838  const VkRenderPassCreateInfo * pCreateInfo,
1839  const VkAllocationCallbacks * pAllocator,
1840  VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
1841  {
1842  return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
1843  }
1844 
1845  void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1846  {
1847  return ::vkDestroyRenderPass( device, renderPass, pAllocator );
1848  }
1849 
1850  void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
1851  {
1852  return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
1853  }
1854 
1855  VkResult vkCreateCommandPool( VkDevice device,
1856  const VkCommandPoolCreateInfo * pCreateInfo,
1857  const VkAllocationCallbacks * pAllocator,
1858  VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT
1859  {
1860  return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
1861  }
1862 
1863  void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
1864  {
1865  return ::vkDestroyCommandPool( device, commandPool, pAllocator );
1866  }
1867 
1868  VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
1869  {
1870  return ::vkResetCommandPool( device, commandPool, flags );
1871  }
1872 
1874  const VkCommandBufferAllocateInfo * pAllocateInfo,
1875  VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
1876  {
1877  return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
1878  }
1879 
1880  void vkFreeCommandBuffers( VkDevice device,
1881  VkCommandPool commandPool,
1882  uint32_t commandBufferCount,
1883  const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
1884  {
1885  return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
1886  }
1887 
1888  VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
1889  {
1890  return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
1891  }
1892 
1893  VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
1894  {
1895  return ::vkEndCommandBuffer( commandBuffer );
1896  }
1897 
1899  {
1900  return ::vkResetCommandBuffer( commandBuffer, flags );
1901  }
1902 
1903  void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
1904  {
1905  return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
1906  }
1907 
1908  void
1909  vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
1910  {
1911  return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
1912  }
1913 
1914  void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
1915  {
1916  return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
1917  }
1918 
1919  void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
1920  {
1921  return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
1922  }
1923 
1924  void vkCmdSetDepthBias( VkCommandBuffer commandBuffer,
1925  float depthBiasConstantFactor,
1926  float depthBiasClamp,
1927  float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
1928  {
1929  return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
1930  }
1931 
1932  void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
1933  {
1934  return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
1935  }
1936 
1937  void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
1938  {
1939  return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
1940  }
1941 
1942  void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
1943  {
1944  return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
1945  }
1946 
1947  void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
1948  {
1949  return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
1950  }
1951 
1952  void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
1953  {
1954  return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
1955  }
1956 
1957  void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer,
1958  VkPipelineBindPoint pipelineBindPoint,
1959  VkPipelineLayout layout,
1960  uint32_t firstSet,
1961  uint32_t descriptorSetCount,
1962  const VkDescriptorSet * pDescriptorSets,
1963  uint32_t dynamicOffsetCount,
1964  const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
1965  {
1967  commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
1968  }
1969 
1970  void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
1971  {
1972  return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
1973  }
1974 
1975  void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer,
1976  uint32_t firstBinding,
1977  uint32_t bindingCount,
1978  const VkBuffer * pBuffers,
1979  const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
1980  {
1981  return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
1982  }
1983 
1984  void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
1986  {
1987  return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
1988  }
1989 
1990  void vkCmdDrawIndexed( VkCommandBuffer commandBuffer,
1991  uint32_t indexCount,
1992  uint32_t instanceCount,
1993  uint32_t firstIndex,
1994  int32_t vertexOffset,
1995  uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
1996  {
1997  return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
1998  }
1999 
2000  void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
2001  {
2002  return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
2003  }
2004 
2005  void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
2007  {
2008  return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
2009  }
2010 
2011  void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
2012  {
2013  return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
2014  }
2015 
2016  void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
2017  {
2018  return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
2019  }
2020 
2021  void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const
2023  {
2024  return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
2025  }
2026 
2027  void vkCmdCopyImage( VkCommandBuffer commandBuffer,
2028  VkImage srcImage,
2029  VkImageLayout srcImageLayout,
2030  VkImage dstImage,
2031  VkImageLayout dstImageLayout,
2032  uint32_t regionCount,
2033  const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
2034  {
2035  return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
2036  }
2037 
2038  void vkCmdBlitImage( VkCommandBuffer commandBuffer,
2039  VkImage srcImage,
2040  VkImageLayout srcImageLayout,
2041  VkImage dstImage,
2042  VkImageLayout dstImageLayout,
2043  uint32_t regionCount,
2044  const VkImageBlit * pRegions,
2046  {
2047  return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
2048  }
2049 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyBufferToImage.
    void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer,
                                 VkBuffer        srcBuffer,
                                 VkImage         dstImage,
                                 VkImageLayout   dstImageLayout,
                                 uint32_t        regionCount,
                                 const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
    }
2059 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyImageToBuffer.
    void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer,
                                 VkImage         srcImage,
                                 VkImageLayout   srcImageLayout,
                                 VkBuffer        dstBuffer,
                                 uint32_t        regionCount,
                                 const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
    }
2069 
2070  void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const
2072  {
2073  return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
2074  }
2075 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdFillBuffer.
    void
      vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
    }
2081 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdClearColorImage.
    void vkCmdClearColorImage( VkCommandBuffer commandBuffer,
                               VkImage         image,
                               VkImageLayout   imageLayout,
                               const VkClearColorValue * pColor,
                               uint32_t        rangeCount,
                               const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
    }
2091 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdClearDepthStencilImage.
    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer,
                                      VkImage         image,
                                      VkImageLayout   imageLayout,
                                      const VkClearDepthStencilValue * pDepthStencil,
                                      uint32_t        rangeCount,
                                      const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
    }
2101 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdClearAttachments.
    void vkCmdClearAttachments( VkCommandBuffer commandBuffer,
                                uint32_t        attachmentCount,
                                const VkClearAttachment * pAttachments,
                                uint32_t        rectCount,
                                const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
    }
2110 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdResolveImage.
    void vkCmdResolveImage( VkCommandBuffer commandBuffer,
                            VkImage         srcImage,
                            VkImageLayout   srcImageLayout,
                            VkImage         dstImage,
                            VkImageLayout   dstImageLayout,
                            uint32_t        regionCount,
                            const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }
2121 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetEvent.
    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
    }
2126 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdResetEvent.
    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
    }
2131 
    // Static dispatch: forwards verbatim (all eleven arguments, in order) to the global
    // C symbol ::vkCmdWaitEvents.
    void vkCmdWaitEvents( VkCommandBuffer commandBuffer,
                          uint32_t        eventCount,
                          const VkEvent * pEvents,
                          VkPipelineStageFlags srcStageMask,
                          VkPipelineStageFlags dstStageMask,
                          uint32_t        memoryBarrierCount,
                          const VkMemoryBarrier * pMemoryBarriers,
                          uint32_t        bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier * pBufferMemoryBarriers,
                          uint32_t        imageMemoryBarrierCount,
                          const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWaitEvents( commandBuffer,
                                eventCount,
                                pEvents,
                                srcStageMask,
                                dstStageMask,
                                memoryBarrierCount,
                                pMemoryBarriers,
                                bufferMemoryBarrierCount,
                                pBufferMemoryBarriers,
                                imageMemoryBarrierCount,
                                pImageMemoryBarriers );
    }
2156 
    // Static dispatch: forwards verbatim (all ten arguments, in order) to the global
    // C symbol ::vkCmdPipelineBarrier.
    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               VkDependencyFlags dependencyFlags,
                               uint32_t        memoryBarrierCount,
                               const VkMemoryBarrier * pMemoryBarriers,
                               uint32_t        bufferMemoryBarrierCount,
                               const VkBufferMemoryBarrier * pBufferMemoryBarriers,
                               uint32_t        imageMemoryBarrierCount,
                               const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPipelineBarrier( commandBuffer,
                                     srcStageMask,
                                     dstStageMask,
                                     dependencyFlags,
                                     memoryBarrierCount,
                                     pMemoryBarriers,
                                     bufferMemoryBarrierCount,
                                     pBufferMemoryBarriers,
                                     imageMemoryBarrierCount,
                                     pImageMemoryBarriers );
    }
2179 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBeginQuery.
    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
    }
2184 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdEndQuery.
    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
    }
2189 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdResetQueryPool.
    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
    }
2194 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdWriteTimestamp.
    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer,
                              VkPipelineStageFlagBits pipelineStage,
                              VkQueryPool     queryPool,
                              uint32_t        query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
    }
2202 
2203  void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer,
2204  VkQueryPool queryPool,
2205  uint32_t firstQuery,
2206  uint32_t queryCount,
2207  VkBuffer dstBuffer,
2208  VkDeviceSize dstOffset,
2211  {
2212  return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
2213  }
2214 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdPushConstants.
    void vkCmdPushConstants( VkCommandBuffer commandBuffer,
                             VkPipelineLayout layout,
                             VkShaderStageFlags stageFlags,
                             uint32_t        offset,
                             uint32_t        size,
                             const void *    pValues ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
    }
2224 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBeginRenderPass.
    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer,
                               const VkRenderPassBeginInfo * pRenderPassBegin,
                               VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
    }
2231 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdNextSubpass.
    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdNextSubpass( commandBuffer, contents );
    }
2236 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdEndRenderPass.
    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRenderPass( commandBuffer );
    }
2241 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdExecuteCommands.
    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
    }
2246 
2247  //=== VK_VERSION_1_1 ===
2248 
2250  {
2251  return ::vkEnumerateInstanceVersion( pApiVersion );
2252  }
2253 
    // Static dispatch: forwards verbatim to the global C symbol ::vkBindBufferMemory2.
    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
    }
2258 
    // Static dispatch: forwards verbatim to the global C symbol ::vkBindImageMemory2.
    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
    }
2263 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetDeviceGroupPeerMemoryFeatures.
    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device,
                                             uint32_t heapIndex,
                                             uint32_t localDeviceIndex,
                                             uint32_t remoteDeviceIndex,
                                             VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
    }
2272 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDeviceMask.
    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
    }
2277 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdDispatchBase.
    void vkCmdDispatchBase( VkCommandBuffer commandBuffer,
                            uint32_t        baseGroupX,
                            uint32_t        baseGroupY,
                            uint32_t        baseGroupZ,
                            uint32_t        groupCountX,
                            uint32_t        groupCountY,
                            uint32_t        groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
    }
2288 
2290  uint32_t * pPhysicalDeviceGroupCount,
2291  VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
2292  {
2293  return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
2294  }
2295 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetImageMemoryRequirements2.
    void vkGetImageMemoryRequirements2( VkDevice device,
                                        const VkImageMemoryRequirementsInfo2 * pInfo,
                                        VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
    }
2302 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetBufferMemoryRequirements2.
    void vkGetBufferMemoryRequirements2( VkDevice device,
                                         const VkBufferMemoryRequirementsInfo2 * pInfo,
                                         VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
    }
2309 
2312  uint32_t * pSparseMemoryRequirementCount,
2313  VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
2314  {
2315  return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
2316  }
2317 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceFeatures2.
    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
    }
2322 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceProperties2.
    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
    }
2327 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceFormatProperties2.
    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice,
                                               VkFormat         format,
                                               VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
    }
2334 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceImageFormatProperties2.
    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice,
                                                        const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                                        VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
    }
2341 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceQueueFamilyProperties2.
    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice,
                                                    uint32_t * pQueueFamilyPropertyCount,
                                                    VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
    }
2348 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceMemoryProperties2.
    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice,
                                               VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
    }
2354 
2355  void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice,
2356  const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
2357  uint32_t * pPropertyCount,
2359  {
2360  return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
2361  }
2362 
    // Static dispatch: forwards verbatim to the global C symbol ::vkTrimCommandPool.
    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkTrimCommandPool( device, commandPool, flags );
    }
2367 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetDeviceQueue2.
    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
    }
2372 
2374  const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
2375  const VkAllocationCallbacks * pAllocator,
2376  VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
2377  {
2378  return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
2379  }
2380 
    // Static dispatch: forwards verbatim to the global C symbol ::vkDestroySamplerYcbcrConversion.
    void vkDestroySamplerYcbcrConversion( VkDevice device,
                                          VkSamplerYcbcrConversion ycbcrConversion,
                                          const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
    }
2387 
2389  const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
2390  const VkAllocationCallbacks * pAllocator,
2391  VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
2392  {
2393  return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
2394  }
2395 
    // Static dispatch: forwards verbatim to the global C symbol ::vkDestroyDescriptorUpdateTemplate.
    void vkDestroyDescriptorUpdateTemplate( VkDevice device,
                                            VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                            const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
    }
2402 
    // Static dispatch: forwards verbatim to the global C symbol ::vkUpdateDescriptorSetWithTemplate.
    void vkUpdateDescriptorSetWithTemplate( VkDevice device,
                                            VkDescriptorSet descriptorSet,
                                            VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                            const void * pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
    }
2410 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceExternalBufferProperties.
    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice,
                                                      const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                      VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
    }
2417 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceExternalFenceProperties.
    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice,
                                                     const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                     VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
    }
2424 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceExternalSemaphoreProperties.
    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice,
                                                         const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
                                                         VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
    }
2431 
2432  void vkGetDescriptorSetLayoutSupport( VkDevice device,
2433  const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
2435  {
2436  return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
2437  }
2438 
2439  //=== VK_VERSION_1_2 ===
2440 
2441  void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer,
2442  VkBuffer buffer,
2444  VkBuffer countBuffer,
2445  VkDeviceSize countBufferOffset,
2446  uint32_t maxDrawCount,
2447  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
2448  {
2449  return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
2450  }
2451 
2452  void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer,
2453  VkBuffer buffer,
2455  VkBuffer countBuffer,
2456  VkDeviceSize countBufferOffset,
2457  uint32_t maxDrawCount,
2458  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
2459  {
2460  return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
2461  }
2462 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCreateRenderPass2.
    VkResult vkCreateRenderPass2( VkDevice device,
                                  const VkRenderPassCreateInfo2 * pCreateInfo,
                                  const VkAllocationCallbacks * pAllocator,
                                  VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
    }
2470 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBeginRenderPass2.
    void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer,
                                const VkRenderPassBeginInfo * pRenderPassBegin,
                                const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
    }
2477 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdNextSubpass2.
    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer,
                            const VkSubpassBeginInfo * pSubpassBeginInfo,
                            const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
    }
2484 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdEndRenderPass2.
    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
    }
2489 
    // Static dispatch: forwards verbatim to the global C symbol ::vkResetQueryPool.
    void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
    }
2494 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetSemaphoreCounterValue.
    VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
    }
2499 
    // Static dispatch: forwards verbatim to the global C symbol ::vkWaitSemaphores.
    VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
    }
2504 
    // Static dispatch: forwards verbatim to the global C symbol ::vkSignalSemaphore.
    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSignalSemaphore( device, pSignalInfo );
    }
2509 
2511  {
2512  return ::vkGetBufferDeviceAddress( device, pInfo );
2513  }
2514 
2515  uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
2516  {
2518  }
2519 
2521  {
2523  }
2524 
2525  //=== VK_VERSION_1_3 ===
2526 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceToolProperties.
    VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice,
                                                uint32_t * pToolCount,
                                                VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties );
    }
2533 
2535  const VkPrivateDataSlotCreateInfo * pCreateInfo,
2536  const VkAllocationCallbacks * pAllocator,
2537  VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
2538  {
2539  return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot );
2540  }
2541 
    // Static dispatch: forwards verbatim to the global C symbol ::vkDestroyPrivateDataSlot.
    void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator );
    }
2546 
2547  VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const
2549  {
2550  return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data );
2551  }
2552 
2553  void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const
2555  {
2556  return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData );
2557  }
2558 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetEvent2.
    void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo );
    }
2563 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdResetEvent2.
    void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetEvent2( commandBuffer, event, stageMask );
    }
2568 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdWaitEvents2.
    void vkCmdWaitEvents2( VkCommandBuffer commandBuffer,
                           uint32_t        eventCount,
                           const VkEvent * pEvents,
                           const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos );
    }
2576 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdPipelineBarrier2.
    void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo );
    }
2581 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdWriteTimestamp2.
    void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query );
    }
2586 
    // Static dispatch: forwards verbatim to the global C symbol ::vkQueueSubmit2.
    VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence );
    }
2591 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyBuffer2.
    void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo );
    }
2596 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyImage2.
    void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo );
    }
2601 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyBufferToImage2.
    void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo );
    }
2606 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdCopyImageToBuffer2.
    void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo );
    }
2611 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBlitImage2.
    void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo );
    }
2616 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdResolveImage2.
    void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo );
    }
2621 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBeginRendering.
    void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo );
    }
2626 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdEndRendering.
    void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRendering( commandBuffer );
    }
2631 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetCullMode.
    void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCullMode( commandBuffer, cullMode );
    }
2636 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetFrontFace.
    void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetFrontFace( commandBuffer, frontFace );
    }
2641 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetPrimitiveTopology.
    void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology );
    }
2646 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetViewportWithCount.
    void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports );
    }
2651 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetScissorWithCount.
    void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors );
    }
2656 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdBindVertexBuffers2.
    void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer,
                                  uint32_t        firstBinding,
                                  uint32_t        bindingCount,
                                  const VkBuffer * pBuffers,
                                  const VkDeviceSize * pOffsets,
                                  const VkDeviceSize * pSizes,
                                  const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
    }
2667 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDepthTestEnable.
    void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable );
    }
2672 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDepthWriteEnable.
    void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable );
    }
2677 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDepthCompareOp.
    void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp );
    }
2682 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDepthBoundsTestEnable.
    void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable );
    }
2687 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetStencilTestEnable.
    void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable );
    }
2692 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetStencilOp.
    void vkCmdSetStencilOp( VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            VkStencilOp     failOp,
                            VkStencilOp     passOp,
                            VkStencilOp     depthFailOp,
                            VkCompareOp     compareOp ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
    }
2702 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetRasterizerDiscardEnable.
    void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable );
    }
2707 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetDepthBiasEnable.
    void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable );
    }
2712 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCmdSetPrimitiveRestartEnable.
    void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable );
    }
2717 
2719  const VkDeviceBufferMemoryRequirements * pInfo,
2720  VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
2721  {
2722  return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements );
2723  }
2724 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetDeviceImageMemoryRequirements.
    void vkGetDeviceImageMemoryRequirements( VkDevice device,
                                             const VkDeviceImageMemoryRequirements * pInfo,
                                             VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements );
    }
2731 
2733  const VkDeviceImageMemoryRequirements * pInfo,
2734  uint32_t * pSparseMemoryRequirementCount,
2735  VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
2736  {
2737  return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
2738  }
2739 
2740  //=== VK_KHR_surface ===
2741 
    // Static dispatch: forwards verbatim to the global C symbol ::vkDestroySurfaceKHR.
    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
    }
2746 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceSurfaceSupportKHR.
    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice,
                                                   uint32_t     queueFamilyIndex,
                                                   VkSurfaceKHR surface,
                                                   VkBool32 *   pSupported ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
    }
2754 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR.
    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice,
                                                        VkSurfaceKHR surface,
                                                        VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
    }
2761 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceSurfaceFormatsKHR.
    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice,
                                                   VkSurfaceKHR surface,
                                                   uint32_t *   pSurfaceFormatCount,
                                                   VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
    }
2769 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDeviceSurfacePresentModesKHR.
    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice,
                                                        VkSurfaceKHR surface,
                                                        uint32_t *   pPresentModeCount,
                                                        VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
    }
2777 
2778  //=== VK_KHR_swapchain ===
2779 
    // Static dispatch: forwards verbatim to the global C symbol ::vkCreateSwapchainKHR.
    VkResult vkCreateSwapchainKHR( VkDevice device,
                                   const VkSwapchainCreateInfoKHR * pCreateInfo,
                                   const VkAllocationCallbacks * pAllocator,
                                   VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
    }
2787 
    // Static dispatch: forwards verbatim to the global C symbol ::vkDestroySwapchainKHR.
    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
    }
2792 
2794  VkSwapchainKHR swapchain,
2795  uint32_t * pSwapchainImageCount,
2796  VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
2797  {
2798  return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
2799  }
2800 
2802  VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
2803  {
2804  return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
2805  }
2806 
2808  {
2809  return ::vkQueuePresentKHR( queue, pPresentInfo );
2810  }
2811 
2813  VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
2814  {
2815  return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
2816  }
2817 
2818  VkResult
2820  {
2821  return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
2822  }
2823 
    // Static dispatch: forwards verbatim to the global C symbol ::vkGetPhysicalDevicePresentRectanglesKHR.
    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice,
                                                      VkSurfaceKHR surface,
                                                      uint32_t *   pRectCount,
                                                      VkRect2D *   pRects ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
    }
2831 
    // Static dispatch: forwards verbatim to the global C symbol ::vkAcquireNextImage2KHR.
    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
    }
2836 
2837  //=== VK_KHR_display ===
2838 
2839  VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice,
2840  uint32_t * pPropertyCount,
2841  VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
2842  {
2843  return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
2844  }
2845 
2846  VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice,
2847  uint32_t * pPropertyCount,
2848  VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
2849  {
2850  return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
2851  }
2852 
2853  VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice,
2854  uint32_t planeIndex,
2855  uint32_t * pDisplayCount,
2856  VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT
2857  {
2858  return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
2859  }
2860 
2861  VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice,
2862  VkDisplayKHR display,
2863  uint32_t * pPropertyCount,
2864  VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
2865  {
2866  return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
2867  }
2868 
2869  VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice,
2870  VkDisplayKHR display,
2871  const VkDisplayModeCreateInfoKHR * pCreateInfo,
2872  const VkAllocationCallbacks * pAllocator,
2873  VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT
2874  {
2875  return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
2876  }
2877 
2878  VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice,
2879  VkDisplayModeKHR mode,
2880  uint32_t planeIndex,
2881  VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
2882  {
2883  return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
2884  }
2885 
2887  const VkDisplaySurfaceCreateInfoKHR * pCreateInfo,
2888  const VkAllocationCallbacks * pAllocator,
2889  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2890  {
2891  return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2892  }
2893 
2894  //=== VK_KHR_display_swapchain ===
2895 
2897  uint32_t swapchainCount,
2898  const VkSwapchainCreateInfoKHR * pCreateInfos,
2899  const VkAllocationCallbacks * pAllocator,
2900  VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT
2901  {
2902  return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
2903  }
2904 
2905 # if defined( VK_USE_PLATFORM_XLIB_KHR )
2906  //=== VK_KHR_xlib_surface ===
2907 
2908  VkResult vkCreateXlibSurfaceKHR( VkInstance instance,
2909  const VkXlibSurfaceCreateInfoKHR * pCreateInfo,
2910  const VkAllocationCallbacks * pAllocator,
2911  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2912  {
2913  return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2914  }
2915 
2916  VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice,
2917  uint32_t queueFamilyIndex,
2918  Display * dpy,
2919  VisualID visualID ) const VULKAN_HPP_NOEXCEPT
2920  {
2921  return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
2922  }
2923 # endif /*VK_USE_PLATFORM_XLIB_KHR*/
2924 
2925 # if defined( VK_USE_PLATFORM_XCB_KHR )
2926  //=== VK_KHR_xcb_surface ===
2927 
2928  VkResult vkCreateXcbSurfaceKHR( VkInstance instance,
2929  const VkXcbSurfaceCreateInfoKHR * pCreateInfo,
2930  const VkAllocationCallbacks * pAllocator,
2931  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2932  {
2933  return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2934  }
2935 
2936  VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice,
2937  uint32_t queueFamilyIndex,
2938  xcb_connection_t * connection,
2939  xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
2940  {
2941  return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
2942  }
2943 # endif /*VK_USE_PLATFORM_XCB_KHR*/
2944 
2945 # if defined( VK_USE_PLATFORM_WAYLAND_KHR )
2946  //=== VK_KHR_wayland_surface ===
2947 
2948  VkResult vkCreateWaylandSurfaceKHR( VkInstance instance,
2949  const VkWaylandSurfaceCreateInfoKHR * pCreateInfo,
2950  const VkAllocationCallbacks * pAllocator,
2951  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2952  {
2953  return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2954  }
2955 
2956  VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice,
2957  uint32_t queueFamilyIndex,
2958  struct wl_display * display ) const VULKAN_HPP_NOEXCEPT
2959  {
2960  return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
2961  }
2962 # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
2963 
2964 # if defined( VK_USE_PLATFORM_ANDROID_KHR )
2965  //=== VK_KHR_android_surface ===
2966 
2967  VkResult vkCreateAndroidSurfaceKHR( VkInstance instance,
2968  const VkAndroidSurfaceCreateInfoKHR * pCreateInfo,
2969  const VkAllocationCallbacks * pAllocator,
2970  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2971  {
2972  return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2973  }
2974 # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
2975 
2976 # if defined( VK_USE_PLATFORM_WIN32_KHR )
2977  //=== VK_KHR_win32_surface ===
2978 
2979  VkResult vkCreateWin32SurfaceKHR( VkInstance instance,
2980  const VkWin32SurfaceCreateInfoKHR * pCreateInfo,
2981  const VkAllocationCallbacks * pAllocator,
2982  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
2983  {
2984  return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
2985  }
2986 
2987  VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
2988  {
2989  return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
2990  }
2991 # endif /*VK_USE_PLATFORM_WIN32_KHR*/
2992 
2993  //=== VK_EXT_debug_report ===
2994 
2996  const VkDebugReportCallbackCreateInfoEXT * pCreateInfo,
2997  const VkAllocationCallbacks * pAllocator,
2998  VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
2999  {
3000  return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
3001  }
3002 
3003  void vkDestroyDebugReportCallbackEXT( VkInstance instance,
3004  VkDebugReportCallbackEXT callback,
3005  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3006  {
3007  return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
3008  }
3009 
3010  void vkDebugReportMessageEXT( VkInstance instance,
3012  VkDebugReportObjectTypeEXT objectType,
3013  uint64_t object,
3014  size_t location,
3015  int32_t messageCode,
3016  const char * pLayerPrefix,
3017  const char * pMessage ) const VULKAN_HPP_NOEXCEPT
3018  {
3019  return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
3020  }
3021 
3022  //=== VK_EXT_debug_marker ===
3023 
3025  {
3026  return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
3027  }
3028 
3030  {
3031  return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
3032  }
3033 
3034  void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
3035  {
3036  return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
3037  }
3038 
3039  void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
3040  {
3041  return ::vkCmdDebugMarkerEndEXT( commandBuffer );
3042  }
3043 
3044  void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
3045  {
3046  return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
3047  }
3048 
3049 # if defined( VK_ENABLE_BETA_EXTENSIONS )
3050  //=== VK_KHR_video_queue ===
3051 
3052  VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice,
3053  const VkVideoProfileInfoKHR * pVideoProfile,
3054  VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
3055  {
3056  return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
3057  }
3058 
3059  VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice,
3060  const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
3061  uint32_t * pVideoFormatPropertyCount,
3062  VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT
3063  {
3064  return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
3065  }
3066 
3067  VkResult vkCreateVideoSessionKHR( VkDevice device,
3068  const VkVideoSessionCreateInfoKHR * pCreateInfo,
3069  const VkAllocationCallbacks * pAllocator,
3070  VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT
3071  {
3072  return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
3073  }
3074 
3075  void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3076  {
3077  return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
3078  }
3079 
3081  VkVideoSessionKHR videoSession,
3082  uint32_t * pMemoryRequirementsCount,
3083  VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
3084  {
3085  return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements );
3086  }
3087 
3088  VkResult vkBindVideoSessionMemoryKHR( VkDevice device,
3089  VkVideoSessionKHR videoSession,
3090  uint32_t bindSessionMemoryInfoCount,
3091  const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT
3092  {
3093  return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos );
3094  }
3095 
3097  const VkVideoSessionParametersCreateInfoKHR * pCreateInfo,
3098  const VkAllocationCallbacks * pAllocator,
3099  VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT
3100  {
3101  return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
3102  }
3103 
3105  VkVideoSessionParametersKHR videoSessionParameters,
3107  {
3108  return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
3109  }
3110 
3111  void vkDestroyVideoSessionParametersKHR( VkDevice device,
3112  VkVideoSessionParametersKHR videoSessionParameters,
3113  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3114  {
3115  return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
3116  }
3117 
3118  void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
3119  {
3120  return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
3121  }
3122 
3123  void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
3124  {
3125  return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
3126  }
3127 
3128  void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
3129  {
3130  return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
3131  }
3132 # endif /*VK_ENABLE_BETA_EXTENSIONS*/
3133 
3134 # if defined( VK_ENABLE_BETA_EXTENSIONS )
3135  //=== VK_KHR_video_decode_queue ===
3136 
3137  void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT
3138  {
3139  return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo );
3140  }
3141 # endif /*VK_ENABLE_BETA_EXTENSIONS*/
3142 
3143  //=== VK_EXT_transform_feedback ===
3144 
3145  void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer,
3146  uint32_t firstBinding,
3147  uint32_t bindingCount,
3148  const VkBuffer * pBuffers,
3149  const VkDeviceSize * pOffsets,
3150  const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
3151  {
3152  return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
3153  }
3154 
3155  void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer,
3156  uint32_t firstCounterBuffer,
3157  uint32_t counterBufferCount,
3158  const VkBuffer * pCounterBuffers,
3159  const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
3160  {
3161  return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
3162  }
3163 
3164  void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer,
3165  uint32_t firstCounterBuffer,
3166  uint32_t counterBufferCount,
3167  const VkBuffer * pCounterBuffers,
3168  const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
3169  {
3170  return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
3171  }
3172 
3173  void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const
3175  {
3176  return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
3177  }
3178 
3179  void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
3180  {
3181  return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
3182  }
3183 
3184  void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer,
3185  uint32_t instanceCount,
3186  uint32_t firstInstance,
3187  VkBuffer counterBuffer,
3188  VkDeviceSize counterBufferOffset,
3189  uint32_t counterOffset,
3190  uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
3191  {
3192  return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
3193  }
3194 
3195  //=== VK_NVX_binary_import ===
3196 
3197  VkResult vkCreateCuModuleNVX( VkDevice device,
3198  const VkCuModuleCreateInfoNVX * pCreateInfo,
3199  const VkAllocationCallbacks * pAllocator,
3200  VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT
3201  {
3202  return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule );
3203  }
3204 
3206  const VkCuFunctionCreateInfoNVX * pCreateInfo,
3207  const VkAllocationCallbacks * pAllocator,
3208  VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT
3209  {
3210  return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
3211  }
3212 
3213  void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3214  {
3215  return ::vkDestroyCuModuleNVX( device, module, pAllocator );
3216  }
3217 
3218  void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3219  {
3220  return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
3221  }
3222 
3223  void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
3224  {
3225  return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
3226  }
3227 
3228  //=== VK_NVX_image_view_handle ===
3229 
3230  uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
3231  {
3232  return ::vkGetImageViewHandleNVX( device, pInfo );
3233  }
3234 
3235  VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
3236  {
3237  return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
3238  }
3239 
3240  //=== VK_AMD_draw_indirect_count ===
3241 
3242  void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer,
3243  VkBuffer buffer,
3245  VkBuffer countBuffer,
3246  VkDeviceSize countBufferOffset,
3247  uint32_t maxDrawCount,
3248  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
3249  {
3250  return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
3251  }
3252 
3253  void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer,
3254  VkBuffer buffer,
3256  VkBuffer countBuffer,
3257  VkDeviceSize countBufferOffset,
3258  uint32_t maxDrawCount,
3259  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
3260  {
3261  return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
3262  }
3263 
3264  //=== VK_AMD_shader_info ===
3265 
3266  VkResult vkGetShaderInfoAMD( VkDevice device,
3267  VkPipeline pipeline,
3268  VkShaderStageFlagBits shaderStage,
3269  VkShaderInfoTypeAMD infoType,
3270  size_t * pInfoSize,
3271  void * pInfo ) const VULKAN_HPP_NOEXCEPT
3272  {
3273  return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
3274  }
3275 
3276  //=== VK_KHR_dynamic_rendering ===
3277 
3278  void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
3279  {
3280  return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo );
3281  }
3282 
3283  void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
3284  {
3285  return ::vkCmdEndRenderingKHR( commandBuffer );
3286  }
3287 
3288 # if defined( VK_USE_PLATFORM_GGP )
3289  //=== VK_GGP_stream_descriptor_surface ===
3290 
3291  VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance,
3292  const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
3293  const VkAllocationCallbacks * pAllocator,
3294  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
3295  {
3296  return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
3297  }
3298 # endif /*VK_USE_PLATFORM_GGP*/
3299 
3300  //=== VK_NV_external_memory_capabilities ===
3301 
3303  VkFormat format,
3304  VkImageType type,
3305  VkImageTiling tiling,
3308  VkExternalMemoryHandleTypeFlagsNV externalHandleType,
3309  VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
3310  {
3312  physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
3313  }
3314 
3315 # if defined( VK_USE_PLATFORM_WIN32_KHR )
3316  //=== VK_NV_external_memory_win32 ===
3317 
3318  VkResult vkGetMemoryWin32HandleNV( VkDevice device,
3319  VkDeviceMemory memory,
3321  HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
3322  {
3324  }
3325 # endif /*VK_USE_PLATFORM_WIN32_KHR*/
3326 
3327  //=== VK_KHR_get_physical_device_properties2 ===
3328 
3329  void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
3330  {
3331  return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
3332  }
3333 
3334  void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
3335  {
3336  return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
3337  }
3338 
3339  void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice,
3340  VkFormat format,
3341  VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
3342  {
3343  return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
3344  }
3345 
3346  VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
3347  const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
3348  VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
3349  {
3350  return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
3351  }
3352 
3353  void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice,
3354  uint32_t * pQueueFamilyPropertyCount,
3355  VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
3356  {
3357  return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
3358  }
3359 
3360  void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice,
3361  VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
3362  {
3363  return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
3364  }
3365 
3366  void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
3367  const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
3368  uint32_t * pPropertyCount,
3370  {
3371  return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
3372  }
3373 
3374  //=== VK_KHR_device_group ===
3375 
3377  uint32_t heapIndex,
3378  uint32_t localDeviceIndex,
3379  uint32_t remoteDeviceIndex,
3380  VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
3381  {
3382  return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
3383  }
3384 
3385  void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
3386  {
3387  return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
3388  }
3389 
3390  void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer,
3391  uint32_t baseGroupX,
3392  uint32_t baseGroupY,
3393  uint32_t baseGroupZ,
3394  uint32_t groupCountX,
3395  uint32_t groupCountY,
3396  uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
3397  {
3398  return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
3399  }
3400 
3401 # if defined( VK_USE_PLATFORM_VI_NN )
3402  //=== VK_NN_vi_surface ===
3403 
3404  VkResult vkCreateViSurfaceNN( VkInstance instance,
3405  const VkViSurfaceCreateInfoNN * pCreateInfo,
3406  const VkAllocationCallbacks * pAllocator,
3407  VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
3408  {
3409  return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
3410  }
3411 # endif /*VK_USE_PLATFORM_VI_NN*/
3412 
3413  //=== VK_KHR_maintenance1 ===
3414 
3415  void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
3416  {
3417  return ::vkTrimCommandPoolKHR( device, commandPool, flags );
3418  }
3419 
3420  //=== VK_KHR_device_group_creation ===
3421 
3423  uint32_t * pPhysicalDeviceGroupCount,
3424  VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
3425  {
3426  return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
3427  }
3428 
3429  //=== VK_KHR_external_memory_capabilities ===
3430 
3431  void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice,
3432  const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
3433  VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
3434  {
3435  return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
3436  }
3437 
3438 # if defined( VK_USE_PLATFORM_WIN32_KHR )
3439  //=== VK_KHR_external_memory_win32 ===
3440 
3441  VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
3442  {
3443  return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
3444  }
3445 
3448  HANDLE handle,
3449  VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
3450  {
3451  return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
3452  }
3453 # endif /*VK_USE_PLATFORM_WIN32_KHR*/
3454 
3455  //=== VK_KHR_external_memory_fd ===
3456 
3457  VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
3458  {
3459  return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
3460  }
3461 
3464  int fd,
3465  VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
3466  {
3467  return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
3468  }
3469 
3470  //=== VK_KHR_external_semaphore_capabilities ===
3471 
3472  void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice,
3473  const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
3474  VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
3475  {
3476  return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
3477  }
3478 
3479 # if defined( VK_USE_PLATFORM_WIN32_KHR )
3480  //=== VK_KHR_external_semaphore_win32 ===
3481 
3482  VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device,
3483  const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
3484  {
3485  return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
3486  }
3487 
3488  VkResult
3489  vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
3490  {
3491  return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
3492  }
3493 # endif /*VK_USE_PLATFORM_WIN32_KHR*/
3494 
3495  //=== VK_KHR_external_semaphore_fd ===
3496 
3497  VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
3498  {
3499  return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
3500  }
3501 
3502  VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
3503  {
3504  return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
3505  }
3506 
3507  //=== VK_KHR_push_descriptor ===
3508 
3509  void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer,
3510  VkPipelineBindPoint pipelineBindPoint,
3511  VkPipelineLayout layout,
3512  uint32_t set,
3513  uint32_t descriptorWriteCount,
3514  const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
3515  {
3516  return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
3517  }
3518 
3519  void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer,
3520  VkDescriptorUpdateTemplate descriptorUpdateTemplate,
3521  VkPipelineLayout layout,
3522  uint32_t set,
3523  const void * pData ) const VULKAN_HPP_NOEXCEPT
3524  {
3525  return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
3526  }
3527 
3528  //=== VK_EXT_conditional_rendering ===
3529 
3530  void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer,
3531  const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
3532  {
3533  return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
3534  }
3535 
3536  void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
3537  {
3539  }
3540 
3541  //=== VK_KHR_descriptor_update_template ===
3542 
3544  const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
3545  const VkAllocationCallbacks * pAllocator,
3546  VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
3547  {
3548  return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
3549  }
3550 
3552  VkDescriptorUpdateTemplate descriptorUpdateTemplate,
3553  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3554  {
3555  return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
3556  }
3557 
3559  VkDescriptorSet descriptorSet,
3560  VkDescriptorUpdateTemplate descriptorUpdateTemplate,
3561  const void * pData ) const VULKAN_HPP_NOEXCEPT
3562  {
3563  return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
3564  }
3565 
3566  //=== VK_NV_clip_space_w_scaling ===
3567 
3568  void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer,
3569  uint32_t firstViewport,
3570  uint32_t viewportCount,
3571  const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
3572  {
3573  return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
3574  }
3575 
3576  //=== VK_EXT_direct_mode_display ===
3577 
3578  VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
3579  {
3580  return ::vkReleaseDisplayEXT( physicalDevice, display );
3581  }
3582 
3583 # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
3584  //=== VK_EXT_acquire_xlib_display ===
3585 
3586  VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
3587  {
3588  return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
3589  }
3590 
3591  VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
3592  {
3593  return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
3594  }
3595 # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
3596 
3597  //=== VK_EXT_display_surface_counter ===
3598 
3599  VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice,
3600  VkSurfaceKHR surface,
3601  VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
3602  {
3603  return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
3604  }
3605 
3606  //=== VK_EXT_display_control ===
3607 
3608  VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
3609  {
3610  return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
3611  }
3612 
3614  const VkDeviceEventInfoEXT * pDeviceEventInfo,
3615  const VkAllocationCallbacks * pAllocator,
3616  VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
3617  {
3618  return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
3619  }
3620 
3622  VkDisplayKHR display,
3623  const VkDisplayEventInfoEXT * pDisplayEventInfo,
3624  const VkAllocationCallbacks * pAllocator,
3625  VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
3626  {
3627  return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
3628  }
3629 
3631  VkSwapchainKHR swapchain,
3633  uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT
3634  {
3635  return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
3636  }
3637 
3638  //=== VK_GOOGLE_display_timing ===
3639 
3641  VkSwapchainKHR swapchain,
3642  VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
3643  {
3644  return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
3645  }
3646 
3648  VkSwapchainKHR swapchain,
3649  uint32_t * pPresentationTimingCount,
3650  VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
3651  {
3652  return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
3653  }
3654 
    //=== VK_EXT_discard_rectangles ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer,
                                      uint32_t firstDiscardRectangle,
                                      uint32_t discardRectangleCount,
                                      const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
    }

    //=== VK_EXT_hdr_metadata ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkSetHdrMetadataEXT( VkDevice device,
                              uint32_t swapchainCount,
                              const VkSwapchainKHR * pSwapchains,
                              const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
    }
3674 
3675  //=== VK_KHR_create_renderpass2 ===
3676 
3678  const VkRenderPassCreateInfo2 * pCreateInfo,
3679  const VkAllocationCallbacks * pAllocator,
3680  VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
3681  {
3682  return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
3683  }
3684 
3685  void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer,
3686  const VkRenderPassBeginInfo * pRenderPassBegin,
3687  const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
3688  {
3689  return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
3690  }
3691 
3692  void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer,
3693  const VkSubpassBeginInfo * pSubpassBeginInfo,
3694  const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
3695  {
3696  return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
3697  }
3698 
3699  void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
3700  {
3701  return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
3702  }
3703 
3704  //=== VK_KHR_shared_presentable_image ===
3705 
3706  VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
3707  {
3708  return ::vkGetSwapchainStatusKHR( device, swapchain );
3709  }
3710 
3711  //=== VK_KHR_external_fence_capabilities ===
3712 
3713  void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice,
3714  const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
3715  VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
3716  {
3717  return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
3718  }
3719 
# if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_fence_win32 ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
    }
# endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_fence_fd ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
    }
3745 
3746  //=== VK_KHR_performance_query ===
3747 
3748  VkResult
3750  uint32_t queueFamilyIndex,
3751  uint32_t * pCounterCount,
3752  VkPerformanceCounterKHR * pCounters,
3753  VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
3754  {
3756  physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
3757  }
3758 
3759  void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice,
3760  const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
3761  uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT
3762  {
3763  return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
3764  }
3765 
3767  {
3768  return ::vkAcquireProfilingLockKHR( device, pInfo );
3769  }
3770 
3771  void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
3772  {
3774  }
3775 
    //=== VK_KHR_get_surface_capabilities2 ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice,
                                                         const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                         VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice,
                                                    const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                    uint32_t * pSurfaceFormatCount,
                                                    VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
    }

    //=== VK_KHR_get_display_properties2 ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice,
                                                       uint32_t * pPropertyCount,
                                                       VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
    }
3801 
3803  uint32_t * pPropertyCount,
3805  {
3806  return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
3807  }
3808 
3809  VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice,
3810  VkDisplayKHR display,
3811  uint32_t * pPropertyCount,
3812  VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
3813  {
3814  return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
3815  }
3816 
3817  VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice,
3818  const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo,
3819  VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
3820  {
3821  return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
3822  }
3823 
# if defined( VK_USE_PLATFORM_IOS_MVK )
    //=== VK_MVK_ios_surface ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkCreateIOSSurfaceMVK( VkInstance instance,
                                    const VkIOSSurfaceCreateInfoMVK * pCreateInfo,
                                    const VkAllocationCallbacks * pAllocator,
                                    VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
    }
# endif /*VK_USE_PLATFORM_IOS_MVK*/

# if defined( VK_USE_PLATFORM_MACOS_MVK )
    //=== VK_MVK_macos_surface ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance,
                                      const VkMacOSSurfaceCreateInfoMVK * pCreateInfo,
                                      const VkAllocationCallbacks * pAllocator,
                                      VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
    }
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
3847 
3848  //=== VK_EXT_debug_utils ===
3849 
3851  {
3852  return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
3853  }
3854 
3856  {
3857  return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
3858  }
3859 
3861  {
3863  }
3864 
3866  {
3868  }
3869 
3871  {
3873  }
3874 
3875  void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
3876  {
3877  return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
3878  }
3879 
3880  void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
3881  {
3882  return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
3883  }
3884 
3885  void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
3886  {
3887  return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
3888  }
3889 
3891  const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo,
3892  const VkAllocationCallbacks * pAllocator,
3893  VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
3894  {
3895  return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
3896  }
3897 
3898  void vkDestroyDebugUtilsMessengerEXT( VkInstance instance,
3899  VkDebugUtilsMessengerEXT messenger,
3900  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3901  {
3902  return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
3903  }
3904 
3905  void vkSubmitDebugUtilsMessageEXT( VkInstance instance,
3907  VkDebugUtilsMessageTypeFlagsEXT messageTypes,
3908  const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT
3909  {
3910  return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
3911  }
3912 
3913 # if defined( VK_USE_PLATFORM_ANDROID_KHR )
3914  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
3915 
3917  const struct AHardwareBuffer * buffer,
3919  {
3921  }
3922 
3925  struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
3926  {
3927  return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
3928  }
3929 # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
3930 
3931  //=== VK_EXT_sample_locations ===
3932 
3933  void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
3934  {
3935  return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
3936  }
3937 
3938  void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice,
3940  VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
3941  {
3942  return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
3943  }
3944 
3945  //=== VK_KHR_get_memory_requirements2 ===
3946 
3947  void vkGetImageMemoryRequirements2KHR( VkDevice device,
3948  const VkImageMemoryRequirementsInfo2 * pInfo,
3949  VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
3950  {
3951  return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
3952  }
3953 
3954  void vkGetBufferMemoryRequirements2KHR( VkDevice device,
3955  const VkBufferMemoryRequirementsInfo2 * pInfo,
3956  VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
3957  {
3958  return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
3959  }
3960 
3963  uint32_t * pSparseMemoryRequirementCount,
3964  VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
3965  {
3966  return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
3967  }
3968 
3969  //=== VK_KHR_acceleration_structure ===
3970 
3972  const VkAccelerationStructureCreateInfoKHR * pCreateInfo,
3973  const VkAllocationCallbacks * pAllocator,
3974  VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
3975  {
3976  return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
3977  }
3978 
3979  void vkDestroyAccelerationStructureKHR( VkDevice device,
3980  VkAccelerationStructureKHR accelerationStructure,
3981  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
3982  {
3983  return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
3984  }
3985 
3986  void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer,
3987  uint32_t infoCount,
3989  const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
3990  {
3991  return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
3992  }
3993 
3994  void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer,
3995  uint32_t infoCount,
3997  const VkDeviceAddress * pIndirectDeviceAddresses,
3998  const uint32_t * pIndirectStrides,
3999  const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
4000  {
4002  commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
4003  }
4004 
4006  VkDeferredOperationKHR deferredOperation,
4007  uint32_t infoCount,
4009  const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
4010  {
4011  return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
4012  }
4013 
4015  VkDeferredOperationKHR deferredOperation,
4017  {
4018  return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
4019  }
4020 
4022  VkDeferredOperationKHR deferredOperation,
4024  {
4025  return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
4026  }
4027 
4029  VkDeferredOperationKHR deferredOperation,
4031  {
4032  return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
4033  }
4034 
4036  uint32_t accelerationStructureCount,
4037  const VkAccelerationStructureKHR * pAccelerationStructures,
4038  VkQueryType queryType,
4039  size_t dataSize,
4040  void * pData,
4041  size_t stride ) const VULKAN_HPP_NOEXCEPT
4042  {
4043  return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
4044  }
4045 
4046  void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
4047  {
4048  return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
4049  }
4050 
4051  void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer,
4053  {
4055  }
4056 
4057  void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer,
4059  {
4061  }
4062 
4065  {
4067  }
4068 
4069  void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer,
4070  uint32_t accelerationStructureCount,
4071  const VkAccelerationStructureKHR * pAccelerationStructures,
4072  VkQueryType queryType,
4073  VkQueryPool queryPool,
4074  uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
4075  {
4077  commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
4078  }
4079 
4081  const VkAccelerationStructureVersionInfoKHR * pVersionInfo,
4083  {
4084  return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
4085  }
4086 
4090  const uint32_t * pMaxPrimitiveCounts,
4092  {
4093  return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
4094  }
4095 
4096  //=== VK_KHR_sampler_ycbcr_conversion ===
4097 
4099  const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
4100  const VkAllocationCallbacks * pAllocator,
4101  VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
4102  {
4103  return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
4104  }
4105 
4106  void vkDestroySamplerYcbcrConversionKHR( VkDevice device,
4107  VkSamplerYcbcrConversion ycbcrConversion,
4108  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
4109  {
4110  return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
4111  }
4112 
4113  //=== VK_KHR_bind_memory2 ===
4114 
4115  VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
4116  {
4117  return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
4118  }
4119 
4120  VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
4121  {
4122  return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
4123  }
4124 
4125  //=== VK_EXT_image_drm_format_modifier ===
4126 
4127  VkResult
4129  {
4131  }
4132 
4133  //=== VK_EXT_validation_cache ===
4134 
4136  const VkValidationCacheCreateInfoEXT * pCreateInfo,
4137  const VkAllocationCallbacks * pAllocator,
4138  VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
4139  {
4140  return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
4141  }
4142 
4143  void
4144  vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
4145  {
4146  return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
4147  }
4148 
4150  VkValidationCacheEXT dstCache,
4151  uint32_t srcCacheCount,
4152  const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
4153  {
4154  return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
4155  }
4156 
4157  VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
4158  {
4159  return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
4160  }
4161 
    //=== VK_NV_shading_rate_image ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer,
                                               uint32_t firstViewport,
                                               uint32_t viewportCount,
                                               const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer,
                                      VkCoarseSampleOrderTypeNV sampleOrderType,
                                      uint32_t customSampleOrderCount,
                                      const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
    }
4183  }
4184 
4185  //=== VK_NV_ray_tracing ===
4186 
4188  const VkAccelerationStructureCreateInfoNV * pCreateInfo,
4189  const VkAllocationCallbacks * pAllocator,
4190  VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
4191  {
4192  return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
4193  }
4194 
4195  void vkDestroyAccelerationStructureNV( VkDevice device,
4196  VkAccelerationStructureNV accelerationStructure,
4197  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
4198  {
4199  return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
4200  }
4201 
4204  VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
4205  {
4206  return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
4207  }
4208 
4210  uint32_t bindInfoCount,
4212  {
4213  return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
4214  }
4215 
4216  void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer,
4217  const VkAccelerationStructureInfoNV * pInfo,
4218  VkBuffer instanceData,
4219  VkDeviceSize instanceOffset,
4220  VkBool32 update,
4221  VkAccelerationStructureNV dst,
4222  VkAccelerationStructureNV src,
4223  VkBuffer scratch,
4224  VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
4225  {
4226  return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
4227  }
4228 
4229  void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer,
4230  VkAccelerationStructureNV dst,
4231  VkAccelerationStructureNV src,
4233  {
4234  return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
4235  }
4236 
    // Static-dispatch trampoline: forwards all shader-binding-table and dispatch
    // dimensions verbatim to the global C entry point.
    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer,
                           VkBuffer raygenShaderBindingTableBuffer,
                           VkDeviceSize raygenShaderBindingOffset,
                           VkBuffer missShaderBindingTableBuffer,
                           VkDeviceSize missShaderBindingOffset,
                           VkDeviceSize missShaderBindingStride,
                           VkBuffer hitShaderBindingTableBuffer,
                           VkDeviceSize hitShaderBindingOffset,
                           VkDeviceSize hitShaderBindingStride,
                           VkBuffer callableShaderBindingTableBuffer,
                           VkDeviceSize callableShaderBindingOffset,
                           VkDeviceSize callableShaderBindingStride,
                           uint32_t width,
                           uint32_t height,
                           uint32_t depth ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdTraceRaysNV( commandBuffer,
                                 raygenShaderBindingTableBuffer,
                                 raygenShaderBindingOffset,
                                 missShaderBindingTableBuffer,
                                 missShaderBindingOffset,
                                 missShaderBindingStride,
                                 hitShaderBindingTableBuffer,
                                 hitShaderBindingOffset,
                                 hitShaderBindingStride,
                                 callableShaderBindingTableBuffer,
                                 callableShaderBindingOffset,
                                 callableShaderBindingStride,
                                 width,
                                 height,
                                 depth );
    }
4269 
4271  VkPipelineCache pipelineCache,
4272  uint32_t createInfoCount,
4273  const VkRayTracingPipelineCreateInfoNV * pCreateInfos,
4274  const VkAllocationCallbacks * pAllocator,
4275  VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
4276  {
4277  return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
4278  }
4279 
4281  VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
4282  {
4283  return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
4284  }
4285 
4287  VkAccelerationStructureNV accelerationStructure,
4288  size_t dataSize,
4289  void * pData ) const VULKAN_HPP_NOEXCEPT
4290  {
4291  return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
4292  }
4293 
4294  void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer,
4295  uint32_t accelerationStructureCount,
4296  const VkAccelerationStructureNV * pAccelerationStructures,
4297  VkQueryType queryType,
4298  VkQueryPool queryPool,
4299  uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
4300  {
4302  commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
4303  }
4304 
4305  VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
4306  {
4307  return ::vkCompileDeferredNV( device, pipeline, shader );
4308  }
4309 
4310  //=== VK_KHR_maintenance3 ===
4311 
4312  void vkGetDescriptorSetLayoutSupportKHR( VkDevice device,
4313  const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
4315  {
4316  return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
4317  }
4318 
4319  //=== VK_KHR_draw_indirect_count ===
4320 
4321  void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer,
4322  VkBuffer buffer,
4324  VkBuffer countBuffer,
4325  VkDeviceSize countBufferOffset,
4326  uint32_t maxDrawCount,
4327  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
4328  {
4329  return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
4330  }
4331 
4332  void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer,
4333  VkBuffer buffer,
4335  VkBuffer countBuffer,
4336  VkDeviceSize countBufferOffset,
4337  uint32_t maxDrawCount,
4338  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
4339  {
4340  return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
4341  }
4342 
4343  //=== VK_EXT_external_memory_host ===
4344 
4347  const void * pHostPointer,
4348  VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
4349  {
4350  return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
4351  }
4352 
4353  //=== VK_AMD_buffer_marker ===
4354 
4355  void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer,
4356  VkPipelineStageFlagBits pipelineStage,
4357  VkBuffer dstBuffer,
4358  VkDeviceSize dstOffset,
4359  uint32_t marker ) const VULKAN_HPP_NOEXCEPT
4360  {
4361  return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
4362  }
4363 
4364  //=== VK_EXT_calibrated_timestamps ===
4365 
4367  uint32_t * pTimeDomainCount,
4368  VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT
4369  {
4370  return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
4371  }
4372 
4374  uint32_t timestampCount,
4375  const VkCalibratedTimestampInfoEXT * pTimestampInfos,
4376  uint64_t * pTimestamps,
4377  uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
4378  {
4379  return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
4380  }
4381 
4382  //=== VK_NV_mesh_shader ===
4383 
4384  void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
4385  {
4386  return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
4387  }
4388 
4389  void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
4391  {
4392  return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
4393  }
4394 
4395  void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer,
4396  VkBuffer buffer,
4398  VkBuffer countBuffer,
4399  VkDeviceSize countBufferOffset,
4400  uint32_t maxDrawCount,
4401  uint32_t stride ) const VULKAN_HPP_NOEXCEPT
4402  {
4403  return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
4404  }
4405 
    //=== VK_NV_scissor_exclusive ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer,
                                     uint32_t firstExclusiveScissor,
                                     uint32_t exclusiveScissorCount,
                                     const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
    }

    //=== VK_NV_device_diagnostic_checkpoints ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
    }

    //=== VK_KHR_timeline_semaphore ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
    }

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
    }
4444 
4445  //=== VK_INTEL_performance_query ===
4446 
4448  {
4449  return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
4450  }
4451 
4453  {
4455  }
4456 
4457  VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
4458  {
4459  return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
4460  }
4461 
4462  VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer,
4463  const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
4464  {
4465  return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
4466  }
4467 
4468  VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
4469  {
4470  return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
4471  }
4472 
4474  const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
4475  VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT
4476  {
4477  return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
4478  }
4479 
4480  VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
4481  {
4482  return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
4483  }
4484 
4485  VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
4486  {
4488  }
4489 
4490  VkResult
4492  {
4493  return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
4494  }
4495 
    //=== VK_AMD_display_native_hdr ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
    }

# if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_imagepipe_surface ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance,
                                              const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
                                              const VkAllocationCallbacks * pAllocator,
                                              VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
    }
# endif /*VK_USE_PLATFORM_FUCHSIA*/

# if defined( VK_USE_PLATFORM_METAL_EXT )
    //=== VK_EXT_metal_surface ===

    // Static-dispatch trampoline: forwards verbatim to the global C entry point.
    VkResult vkCreateMetalSurfaceEXT( VkInstance instance,
                                      const VkMetalSurfaceCreateInfoEXT * pCreateInfo,
                                      const VkAllocationCallbacks * pAllocator,
                                      VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
    }
# endif /*VK_USE_PLATFORM_METAL_EXT*/
4526 
4527  //=== VK_KHR_fragment_shading_rate ===
4528 
4529  VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice,
4530  uint32_t * pFragmentShadingRateCount,
4531  VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
4532  {
4533  return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
4534  }
4535 
4536  void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer,
4537  const VkExtent2D * pFragmentSize,
4538  const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
4539  {
4540  return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
4541  }
4542 
4543  //=== VK_EXT_buffer_device_address ===
4544 
4546  {
4547  return ::vkGetBufferDeviceAddressEXT( device, pInfo );
4548  }
4549 
4550  //=== VK_EXT_tooling_info ===
4551 
4552  VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice,
4553  uint32_t * pToolCount,
4554  VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
4555  {
4556  return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
4557  }
4558 
4559  //=== VK_KHR_present_wait ===
4560 
4561  VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
4562  {
4563  return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout );
4564  }
4565 
4566  //=== VK_NV_cooperative_matrix ===
4567 
4569  uint32_t * pPropertyCount,
4571  {
4572  return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
4573  }
4574 
4575  //=== VK_NV_coverage_reduction_mode ===
4576 
4578  VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
4579  {
4580  return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
4581  }
4582 
4583 # if defined( VK_USE_PLATFORM_WIN32_KHR )
4584  //=== VK_EXT_full_screen_exclusive ===
4585