vulkan_structs.hpp
// Copyright 2015-2022 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_STRUCTS_HPP
#define VULKAN_STRUCTS_HPP

#include <cstring>  // strcmp

namespace VULKAN_HPP_NAMESPACE
{
  //===============
  //=== STRUCTS ===
  //===============

  struct AabbPositionsKHR
  {
    using NativeType = VkAabbPositionsKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
      : minX( minX_ )
      , minY( minY_ )
      , minZ( minZ_ )
      , maxX( maxX_ )
      , maxY( maxY_ )
      , maxZ( maxZ_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}

    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
    {
      minX = minX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
    {
      minY = minY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
    {
      minZ = minZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
    {
      maxX = maxX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
    {
      maxY = maxY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
    {
      maxZ = maxZ_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
    }

    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
# endif
    }

    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float minX = {};
    float minY = {};
    float minZ = {};
    float maxX = {};
    float maxY = {};
    float maxZ = {};
  };
  using AabbPositionsNV = AabbPositionsKHR;

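  // Usage sketch (illustration only, not part of the generated header): AabbPositionsKHR describes
  // an axis-aligned bounding box for a custom intersection primitive.  It can be filled through the
  // constructor or the chainable setters, and handed to the C API via the implicit conversion to
  // VkAabbPositionsKHR.  The function name below is hypothetical.
  inline void exampleFillUnitAabb( VkAabbPositionsKHR & out )
  {
    AabbPositionsKHR aabb;
    aabb.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f ).setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
    out = aabb;  // conversion operator reinterprets the struct as the C type
  }
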
  union DeviceOrHostAddressConstKHR
  {
    using NativeType = VkDeviceOrHostAddressConstKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkDeviceOrHostAddressConstKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this );
    }

    operator VkDeviceOrHostAddressConstKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
    const void *                        hostAddress;
#else
    VkDeviceAddress deviceAddress;
    const void *    hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

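  // Usage sketch (illustration only, not part of the generated header): the union carries either a
  // GPU address or a host pointer, depending on whether an acceleration structure is built on the
  // device or on the host; only the member matching the build type is read.  The names are
  // hypothetical and the setters assume VULKAN_HPP_NO_UNION_SETTERS is not defined.
  inline DeviceOrHostAddressConstKHR exampleVertexAddress( DeviceAddress gpuAddress, const float * hostVertices, bool hostBuild )
  {
    DeviceOrHostAddressConstKHR address;
    if ( hostBuild )
      address.setHostAddress( hostVertices );  // host builds read through a CPU pointer
    else
      address.setDeviceAddress( gpuAddress );  // device builds use a buffer device address
    return address;
  }
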
180  {
182 
183  static const bool allowDuplicate = false;
185 
186 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
190  VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
191  uint32_t maxVertex_ = {},
195  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
196  : pNext( pNext_ )
197  , vertexFormat( vertexFormat_ )
198  , vertexData( vertexData_ )
199  , vertexStride( vertexStride_ )
200  , maxVertex( maxVertex_ )
201  , indexType( indexType_ )
202  , indexData( indexData_ )
203  , transformData( transformData_ )
204  {
205  }
206 
209 
211  : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
212  {
213  }
214 
216 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
217 
219  {
220  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
221  return *this;
222  }
223 
224 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
226  {
227  pNext = pNext_;
228  return *this;
229  }
230 
232  {
233  vertexFormat = vertexFormat_;
234  return *this;
235  }
236 
239  {
240  vertexData = vertexData_;
241  return *this;
242  }
243 
246  {
247  vertexStride = vertexStride_;
248  return *this;
249  }
250 
252  {
253  maxVertex = maxVertex_;
254  return *this;
255  }
256 
258  {
259  indexType = indexType_;
260  return *this;
261  }
262 
265  {
266  indexData = indexData_;
267  return *this;
268  }
269 
272  {
273  transformData = transformData_;
274  return *this;
275  }
276 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
277 
279  {
280  return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
281  }
282 
284  {
285  return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
286  }
287 
288 #if defined( VULKAN_HPP_USE_REFLECT )
289 # if 14 <= VULKAN_HPP_CPP_VERSION
290  auto
291 # else
292  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
293  const void * const &,
297  uint32_t const &,
301 # endif
302  reflect() const VULKAN_HPP_NOEXCEPT
303  {
305  }
306 #endif
307 
308  public:
310  const void * pNext = {};
314  uint32_t maxVertex = {};
318  };
319 
320  template <>
322  {
324  };
325 
327  {
329 
330  static const bool allowDuplicate = false;
332 
333 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
336  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
337  : pNext( pNext_ )
338  , data( data_ )
339  , stride( stride_ )
340  {
341  }
342 
344 
346  : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
347  {
348  }
349 
351 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
352 
354  {
355  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
356  return *this;
357  }
358 
359 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
361  {
362  pNext = pNext_;
363  return *this;
364  }
365 
368  {
369  data = data_;
370  return *this;
371  }
372 
374  {
375  stride = stride_;
376  return *this;
377  }
378 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
379 
381  {
382  return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR *>( this );
383  }
384 
386  {
387  return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR *>( this );
388  }
389 
390 #if defined( VULKAN_HPP_USE_REFLECT )
391 # if 14 <= VULKAN_HPP_CPP_VERSION
392  auto
393 # else
394  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
395  const void * const &,
398 # endif
399  reflect() const VULKAN_HPP_NOEXCEPT
400  {
401  return std::tie( sType, pNext, data, stride );
402  }
403 #endif
404 
405  public:
407  const void * pNext = {};
410  };
411 
412  template <>
414  {
416  };
417 
419  {
421 
422  static const bool allowDuplicate = false;
424 
425 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
428  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
429  : pNext( pNext_ )
430  , arrayOfPointers( arrayOfPointers_ )
431  , data( data_ )
432  {
433  }
434 
437 
439  : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
440  {
441  }
442 
444 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
445 
447  {
448  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
449  return *this;
450  }
451 
452 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
454  {
455  pNext = pNext_;
456  return *this;
457  }
458 
461  {
462  arrayOfPointers = arrayOfPointers_;
463  return *this;
464  }
465 
468  {
469  data = data_;
470  return *this;
471  }
472 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
473 
475  {
476  return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR *>( this );
477  }
478 
480  {
481  return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR *>( this );
482  }
483 
484 #if defined( VULKAN_HPP_USE_REFLECT )
485 # if 14 <= VULKAN_HPP_CPP_VERSION
486  auto
487 # else
488  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
489  const void * const &,
492 # endif
493  reflect() const VULKAN_HPP_NOEXCEPT
494  {
495  return std::tie( sType, pNext, arrayOfPointers, data );
496  }
497 #endif
498 
499  public:
501  const void * pNext = {};
504  };
505 
506  template <>
508  {
510  };
511 
513  {
515 #if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
516 
518  : triangles( triangles_ )
519  {
520  }
521 
523 
525  : instances( instances_ )
526  {
527  }
528 #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
529 
530 #if !defined( VULKAN_HPP_NO_UNION_SETTERS )
533  {
534  triangles = triangles_;
535  return *this;
536  }
537 
540  {
541  aabbs = aabbs_;
542  return *this;
543  }
544 
547  {
548  instances = instances_;
549  return *this;
550  }
551 #endif /*VULKAN_HPP_NO_UNION_SETTERS*/
552 
553  operator VkAccelerationStructureGeometryDataKHR const &() const
554  {
555  return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR *>( this );
556  }
557 
559  {
560  return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR *>( this );
561  }
562 
563 #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
567 #else
571 #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
572  };
573 
575  {
577 
578  static const bool allowDuplicate = false;
580 
581 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
586  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
587  : pNext( pNext_ )
588  , geometryType( geometryType_ )
589  , geometry( geometry_ )
590  , flags( flags_ )
591  {
592  }
593 
595 
597  : AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
598  {
599  }
600 
602 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
603 
605  {
606  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
607  return *this;
608  }
609 
610 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
612  {
613  pNext = pNext_;
614  return *this;
615  }
616 
618  {
619  geometryType = geometryType_;
620  return *this;
621  }
622 
625  {
626  geometry = geometry_;
627  return *this;
628  }
629 
631  {
632  flags = flags_;
633  return *this;
634  }
635 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
636 
638  {
639  return *reinterpret_cast<const VkAccelerationStructureGeometryKHR *>( this );
640  }
641 
643  {
644  return *reinterpret_cast<VkAccelerationStructureGeometryKHR *>( this );
645  }
646 
647 #if defined( VULKAN_HPP_USE_REFLECT )
648 # if 14 <= VULKAN_HPP_CPP_VERSION
649  auto
650 # else
651  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
652  const void * const &,
656 # endif
657  reflect() const VULKAN_HPP_NOEXCEPT
658  {
659  return std::tie( sType, pNext, geometryType, geometry, flags );
660  }
661 #endif
662 
663  public:
665  const void * pNext = {};
669  };
670 
671  template <>
673  {
675  };
676 
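  // Usage sketch (illustration only, not part of the generated header): a triangle geometry for a
  // bottom-level acceleration structure is typically assembled by chaining the setters of
  // AccelerationStructureGeometryTrianglesDataKHR and wrapping the result in an
  // AccelerationStructureGeometryKHR.  The buffer addresses, vertex layout and function name are
  // assumptions made for the example.
  inline AccelerationStructureGeometryKHR
    exampleTriangleGeometry( DeviceAddress vertexBufferAddress, DeviceAddress indexBufferAddress, uint32_t maxVertex )
  {
    AccelerationStructureGeometryTrianglesDataKHR triangles;
    triangles.setVertexFormat( Format::eR32G32B32Sfloat )
      .setVertexData( DeviceOrHostAddressConstKHR( vertexBufferAddress ) )
      .setVertexStride( 3 * sizeof( float ) )
      .setMaxVertex( maxVertex )
      .setIndexType( IndexType::eUint32 )
      .setIndexData( DeviceOrHostAddressConstKHR( indexBufferAddress ) );

    return AccelerationStructureGeometryKHR()
      .setGeometryType( GeometryTypeKHR::eTriangles )
      .setGeometry( AccelerationStructureGeometryDataKHR( triangles ) )
      .setFlags( GeometryFlagBitsKHR::eOpaque );
  }
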
  union DeviceOrHostAddressKHR
  {
    using NativeType = VkDeviceOrHostAddressKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkDeviceOrHostAddressKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressKHR *>( this );
    }

    operator VkDeviceOrHostAddressKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
    void *                              hostAddress;
#else
    VkDeviceAddress deviceAddress;
    void *          hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

721  {
723 
724  static const bool allowDuplicate = false;
726 
727 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
732  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {},
733  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {},
734  uint32_t geometryCount_ = {},
736  const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {},
738  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
739  : pNext( pNext_ )
740  , type( type_ )
741  , flags( flags_ )
742  , mode( mode_ )
743  , srcAccelerationStructure( srcAccelerationStructure_ )
744  , dstAccelerationStructure( dstAccelerationStructure_ )
745  , geometryCount( geometryCount_ )
746  , pGeometries( pGeometries_ )
747  , ppGeometries( ppGeometries_ )
748  , scratchData( scratchData_ )
749  {
750  }
751 
753 
755  : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
756  {
757  }
758 
759 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
764  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_,
765  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_,
769  const void * pNext_ = nullptr )
770  : pNext( pNext_ )
771  , type( type_ )
772  , flags( flags_ )
773  , mode( mode_ )
774  , srcAccelerationStructure( srcAccelerationStructure_ )
775  , dstAccelerationStructure( dstAccelerationStructure_ )
776  , geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) )
777  , pGeometries( geometries_.data() )
778  , ppGeometries( pGeometries_.data() )
779  , scratchData( scratchData_ )
780  {
781 # ifdef VULKAN_HPP_NO_EXCEPTIONS
782  VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 );
783 # else
784  if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
785  {
786  throw LogicError(
788  "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
789  }
790 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
791  }
792 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
793 
795 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
796 
798  {
799  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
800  return *this;
801  }
802 
803 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
805  {
806  pNext = pNext_;
807  return *this;
808  }
809 
811  {
812  type = type_;
813  return *this;
814  }
815 
818  {
819  flags = flags_;
820  return *this;
821  }
822 
825  {
826  mode = mode_;
827  return *this;
828  }
829 
832  {
833  srcAccelerationStructure = srcAccelerationStructure_;
834  return *this;
835  }
836 
839  {
840  dstAccelerationStructure = dstAccelerationStructure_;
841  return *this;
842  }
843 
845  {
846  geometryCount = geometryCount_;
847  return *this;
848  }
849 
852  {
853  pGeometries = pGeometries_;
854  return *this;
855  }
856 
857 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
860  {
861  geometryCount = static_cast<uint32_t>( geometries_.size() );
862  pGeometries = geometries_.data();
863  return *this;
864  }
865 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
866 
869  {
870  ppGeometries = ppGeometries_;
871  return *this;
872  }
873 
874 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
878  {
879  geometryCount = static_cast<uint32_t>( pGeometries_.size() );
880  ppGeometries = pGeometries_.data();
881  return *this;
882  }
883 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
884 
887  {
888  scratchData = scratchData_;
889  return *this;
890  }
891 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
892 
894  {
895  return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( this );
896  }
897 
899  {
900  return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR *>( this );
901  }
902 
903 #if defined( VULKAN_HPP_USE_REFLECT )
904 # if 14 <= VULKAN_HPP_CPP_VERSION
905  auto
906 # else
907  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
908  const void * const &,
914  uint32_t const &,
918 # endif
919  reflect() const VULKAN_HPP_NOEXCEPT
920  {
921  return std::tie(
923  }
924 #endif
925 
926  public:
928  const void * pNext = {};
934  uint32_t geometryCount = {};
938  };
939 
940  template <>
942  {
944  };
945 
947  {
949 
950 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
952  uint32_t primitiveOffset_ = {},
953  uint32_t firstVertex_ = {},
954  uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
955  : primitiveCount( primitiveCount_ )
956  , primitiveOffset( primitiveOffset_ )
957  , firstVertex( firstVertex_ )
958  , transformOffset( transformOffset_ )
959  {
960  }
961 
963 
965  : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
966  {
967  }
968 
970 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
971 
973  {
974  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
975  return *this;
976  }
977 
978 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
980  {
981  primitiveCount = primitiveCount_;
982  return *this;
983  }
984 
986  {
987  primitiveOffset = primitiveOffset_;
988  return *this;
989  }
990 
992  {
993  firstVertex = firstVertex_;
994  return *this;
995  }
996 
998  {
999  transformOffset = transformOffset_;
1000  return *this;
1001  }
1002 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1003 
1005  {
1006  return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR *>( this );
1007  }
1008 
1010  {
1011  return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR *>( this );
1012  }
1013 
1014 #if defined( VULKAN_HPP_USE_REFLECT )
1015 # if 14 <= VULKAN_HPP_CPP_VERSION
1016  auto
1017 # else
1018  std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
1019 # endif
1020  reflect() const VULKAN_HPP_NOEXCEPT
1021  {
1023  }
1024 #endif
1025 
1026 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1027  auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
1028 #else
1030  {
1031 # if defined( VULKAN_HPP_USE_REFLECT )
1032  return this->reflect() == rhs.reflect();
1033 # else
1034  return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) &&
1035  ( transformOffset == rhs.transformOffset );
1036 # endif
1037  }
1038 
1040  {
1041  return !operator==( rhs );
1042  }
1043 #endif
1044 
1045  public:
1046  uint32_t primitiveCount = {};
1047  uint32_t primitiveOffset = {};
1048  uint32_t firstVertex = {};
1049  uint32_t transformOffset = {};
1050  };
1051 
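  // Usage sketch (illustration only, not part of the generated header): a build is described by an
  // AccelerationStructureBuildGeometryInfoKHR (what to build and from which geometries) paired with
  // one AccelerationStructureBuildRangeInfoKHR per geometry (how many primitives to read).  The
  // scratch address and the function name are assumptions made for the example.
  inline void exampleFillBuildInfo( AccelerationStructureGeometryKHR const &    geometry,
                                    uint32_t                                    primitiveCount,
                                    DeviceAddress                               scratchAddress,
                                    AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                                    AccelerationStructureBuildRangeInfoKHR &    rangeInfo )
  {
    buildInfo.setType( AccelerationStructureTypeKHR::eBottomLevel )
      .setFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
      .setMode( BuildAccelerationStructureModeKHR::eBuild )
      .setGeometryCount( 1 )
      .setPGeometries( &geometry )
      .setScratchData( DeviceOrHostAddressKHR( scratchAddress ) );

    rangeInfo.setPrimitiveCount( primitiveCount ).setPrimitiveOffset( 0 ).setFirstVertex( 0 ).setTransformOffset( 0 );
  }
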
1053  {
1055 
1056  static const bool allowDuplicate = false;
1058 
1059 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1061  VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {},
1062  VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {},
1063  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1064  : pNext( pNext_ )
1065  , accelerationStructureSize( accelerationStructureSize_ )
1066  , updateScratchSize( updateScratchSize_ )
1067  , buildScratchSize( buildScratchSize_ )
1068  {
1069  }
1070 
1072 
1074  : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
1075  {
1076  }
1077 
1079 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1080 
1082  {
1083  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
1084  return *this;
1085  }
1086 
1087 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1089  {
1090  pNext = pNext_;
1091  return *this;
1092  }
1093 
1096  {
1097  accelerationStructureSize = accelerationStructureSize_;
1098  return *this;
1099  }
1100 
1103  {
1104  updateScratchSize = updateScratchSize_;
1105  return *this;
1106  }
1107 
1110  {
1111  buildScratchSize = buildScratchSize_;
1112  return *this;
1113  }
1114 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1115 
1117  {
1118  return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
1119  }
1120 
1122  {
1123  return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
1124  }
1125 
1126 #if defined( VULKAN_HPP_USE_REFLECT )
1127 # if 14 <= VULKAN_HPP_CPP_VERSION
1128  auto
1129 # else
1130  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1131  const void * const &,
1135 # endif
1136  reflect() const VULKAN_HPP_NOEXCEPT
1137  {
1139  }
1140 #endif
1141 
1142 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1143  auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
1144 #else
1146  {
1147 # if defined( VULKAN_HPP_USE_REFLECT )
1148  return this->reflect() == rhs.reflect();
1149 # else
1150  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
1151  ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize );
1152 # endif
1153  }
1154 
1156  {
1157  return !operator==( rhs );
1158  }
1159 #endif
1160 
1161  public:
1163  const void * pNext = {};
1167  };
1168 
1169  template <>
1171  {
1173  };
1174 
1176  {
1178 
1179  static const bool allowDuplicate = false;
1181 
1182 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1184  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {},
1185  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1186  : pNext( pNext_ )
1187  , accelerationStructure( accelerationStructure_ )
1188  , accelerationStructureNV( accelerationStructureNV_ )
1189  {
1190  }
1191 
1194 
1196  : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs ) )
1197  {
1198  }
1199 
1202 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1203 
1205  {
1206  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs );
1207  return *this;
1208  }
1209 
1210 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1212  {
1213  pNext = pNext_;
1214  return *this;
1215  }
1216 
1219  {
1220  accelerationStructure = accelerationStructure_;
1221  return *this;
1222  }
1223 
1226  {
1227  accelerationStructureNV = accelerationStructureNV_;
1228  return *this;
1229  }
1230 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1231 
1233  {
1234  return *reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
1235  }
1236 
1238  {
1239  return *reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
1240  }
1241 
1242 #if defined( VULKAN_HPP_USE_REFLECT )
1243 # if 14 <= VULKAN_HPP_CPP_VERSION
1244  auto
1245 # else
1246  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1247  const void * const &,
1250 # endif
1251  reflect() const VULKAN_HPP_NOEXCEPT
1252  {
1254  }
1255 #endif
1256 
1257 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1258  auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default;
1259 #else
1261  {
1262 # if defined( VULKAN_HPP_USE_REFLECT )
1263  return this->reflect() == rhs.reflect();
1264 # else
1265  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) &&
1266  ( accelerationStructureNV == rhs.accelerationStructureNV );
1267 # endif
1268  }
1269 
1271  {
1272  return !operator==( rhs );
1273  }
1274 #endif
1275 
1276  public:
1278  const void * pNext = {};
1281  };
1282 
1283  template <>
1285  {
1287  };
1288 
1290  {
1292 
1293  static const bool allowDuplicate = false;
1295 
1296 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1299  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
1300  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
1303  VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
1304  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1305  : pNext( pNext_ )
1306  , createFlags( createFlags_ )
1307  , buffer( buffer_ )
1308  , offset( offset_ )
1309  , size( size_ )
1310  , type( type_ )
1311  , deviceAddress( deviceAddress_ )
1312  {
1313  }
1314 
1316 
1318  : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
1319  {
1320  }
1321 
1323 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1324 
1326  {
1327  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
1328  return *this;
1329  }
1330 
1331 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1333  {
1334  pNext = pNext_;
1335  return *this;
1336  }
1337 
1340  {
1341  createFlags = createFlags_;
1342  return *this;
1343  }
1344 
1346  {
1347  buffer = buffer_;
1348  return *this;
1349  }
1350 
1352  {
1353  offset = offset_;
1354  return *this;
1355  }
1356 
1358  {
1359  size = size_;
1360  return *this;
1361  }
1362 
1364  {
1365  type = type_;
1366  return *this;
1367  }
1368 
1370  {
1371  deviceAddress = deviceAddress_;
1372  return *this;
1373  }
1374 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1375 
1377  {
1378  return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
1379  }
1380 
1382  {
1383  return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
1384  }
1385 
1386 #if defined( VULKAN_HPP_USE_REFLECT )
1387 # if 14 <= VULKAN_HPP_CPP_VERSION
1388  auto
1389 # else
1390  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1391  const void * const &,
1398 # endif
1399  reflect() const VULKAN_HPP_NOEXCEPT
1400  {
1401  return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
1402  }
1403 #endif
1404 
1405 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1406  auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
1407 #else
1409  {
1410 # if defined( VULKAN_HPP_USE_REFLECT )
1411  return this->reflect() == rhs.reflect();
1412 # else
1413  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
1414  ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
1415 # endif
1416  }
1417 
1419  {
1420  return !operator==( rhs );
1421  }
1422 #endif
1423 
1424  public:
1426  const void * pNext = {};
1433  };
1434 
1435  template <>
1437  {
1439  };
1440 
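  // Usage sketch (illustration only, not part of the generated header): once the required size has
  // been queried (for example with vkGetAccelerationStructureBuildSizesKHR), the acceleration
  // structure is created on top of a buffer that the caller provides.  The function name is
  // hypothetical.
  inline AccelerationStructureCreateInfoKHR exampleCreateInfo( Buffer buffer, DeviceSize accelerationStructureSize )
  {
    return AccelerationStructureCreateInfoKHR()
      .setBuffer( buffer )
      .setOffset( 0 )
      .setSize( accelerationStructureSize )
      .setType( AccelerationStructureTypeKHR::eBottomLevel );
  }
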
1442  {
1444 
1445  static const bool allowDuplicate = false;
1447 
1448 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1450  VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
1451  uint32_t vertexCount_ = {},
1452  VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
1454  VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
1455  VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
1456  uint32_t indexCount_ = {},
1458  VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
1459  VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {},
1460  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1461  : pNext( pNext_ )
1462  , vertexData( vertexData_ )
1463  , vertexOffset( vertexOffset_ )
1464  , vertexCount( vertexCount_ )
1465  , vertexStride( vertexStride_ )
1466  , vertexFormat( vertexFormat_ )
1467  , indexData( indexData_ )
1468  , indexOffset( indexOffset_ )
1469  , indexCount( indexCount_ )
1470  , indexType( indexType_ )
1471  , transformData( transformData_ )
1472  , transformOffset( transformOffset_ )
1473  {
1474  }
1475 
1477 
1478  GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
1479  {
1480  }
1481 
1483 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1484 
1486  {
1487  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
1488  return *this;
1489  }
1490 
1491 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1493  {
1494  pNext = pNext_;
1495  return *this;
1496  }
1497 
1499  {
1500  vertexData = vertexData_;
1501  return *this;
1502  }
1503 
1505  {
1506  vertexOffset = vertexOffset_;
1507  return *this;
1508  }
1509 
1511  {
1512  vertexCount = vertexCount_;
1513  return *this;
1514  }
1515 
1517  {
1518  vertexStride = vertexStride_;
1519  return *this;
1520  }
1521 
1523  {
1524  vertexFormat = vertexFormat_;
1525  return *this;
1526  }
1527 
1529  {
1530  indexData = indexData_;
1531  return *this;
1532  }
1533 
1535  {
1536  indexOffset = indexOffset_;
1537  return *this;
1538  }
1539 
1541  {
1542  indexCount = indexCount_;
1543  return *this;
1544  }
1545 
1547  {
1548  indexType = indexType_;
1549  return *this;
1550  }
1551 
1553  {
1554  transformData = transformData_;
1555  return *this;
1556  }
1557 
1559  {
1560  transformOffset = transformOffset_;
1561  return *this;
1562  }
1563 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1564 
1566  {
1567  return *reinterpret_cast<const VkGeometryTrianglesNV *>( this );
1568  }
1569 
1571  {
1572  return *reinterpret_cast<VkGeometryTrianglesNV *>( this );
1573  }
1574 
1575 #if defined( VULKAN_HPP_USE_REFLECT )
1576 # if 14 <= VULKAN_HPP_CPP_VERSION
1577  auto
1578 # else
1579  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1580  const void * const &,
1583  uint32_t const &,
1588  uint32_t const &,
1592 # endif
1593  reflect() const VULKAN_HPP_NOEXCEPT
1594  {
1595  return std::tie( sType,
1596  pNext,
1597  vertexData,
1598  vertexOffset,
1599  vertexCount,
1600  vertexStride,
1601  vertexFormat,
1602  indexData,
1603  indexOffset,
1604  indexCount,
1605  indexType,
1606  transformData,
1607  transformOffset );
1608  }
1609 #endif
1610 
1611 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1612  auto operator<=>( GeometryTrianglesNV const & ) const = default;
1613 #else
1615  {
1616 # if defined( VULKAN_HPP_USE_REFLECT )
1617  return this->reflect() == rhs.reflect();
1618 # else
1619  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) &&
1620  ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
1621  ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( indexType == rhs.indexType ) &&
1622  ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset );
1623 # endif
1624  }
1625 
1627  {
1628  return !operator==( rhs );
1629  }
1630 #endif
1631 
1632  public:
1634  const void * pNext = {};
1637  uint32_t vertexCount = {};
1642  uint32_t indexCount = {};
1646  };
1647 
1648  template <>
1650  {
1652  };
1653 
1655  {
1657 
1658  static const bool allowDuplicate = false;
1660 
1661 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1663  uint32_t numAABBs_ = {},
1664  uint32_t stride_ = {},
1665  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
1666  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1667  : pNext( pNext_ )
1668  , aabbData( aabbData_ )
1669  , numAABBs( numAABBs_ )
1670  , stride( stride_ )
1671  , offset( offset_ )
1672  {
1673  }
1674 
1676 
1677  GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) ) {}
1678 
1679  GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
1680 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1681 
1683  {
1684  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
1685  return *this;
1686  }
1687 
1688 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1690  {
1691  pNext = pNext_;
1692  return *this;
1693  }
1694 
1696  {
1697  aabbData = aabbData_;
1698  return *this;
1699  }
1700 
1702  {
1703  numAABBs = numAABBs_;
1704  return *this;
1705  }
1706 
1708  {
1709  stride = stride_;
1710  return *this;
1711  }
1712 
1714  {
1715  offset = offset_;
1716  return *this;
1717  }
1718 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1719 
1720  operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
1721  {
1722  return *reinterpret_cast<const VkGeometryAABBNV *>( this );
1723  }
1724 
1726  {
1727  return *reinterpret_cast<VkGeometryAABBNV *>( this );
1728  }
1729 
1730 #if defined( VULKAN_HPP_USE_REFLECT )
1731 # if 14 <= VULKAN_HPP_CPP_VERSION
1732  auto
1733 # else
1734  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1735  const void * const &,
1737  uint32_t const &,
1738  uint32_t const &,
1740 # endif
1741  reflect() const VULKAN_HPP_NOEXCEPT
1742  {
1743  return std::tie( sType, pNext, aabbData, numAABBs, stride, offset );
1744  }
1745 #endif
1746 
1747 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1748  auto operator<=>( GeometryAABBNV const & ) const = default;
1749 #else
1751  {
1752 # if defined( VULKAN_HPP_USE_REFLECT )
1753  return this->reflect() == rhs.reflect();
1754 # else
1755  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) &&
1756  ( offset == rhs.offset );
1757 # endif
1758  }
1759 
1761  {
1762  return !operator==( rhs );
1763  }
1764 #endif
1765 
1766  public:
1768  const void * pNext = {};
1770  uint32_t numAABBs = {};
1771  uint32_t stride = {};
1773  };
1774 
1775  template <>
1777  {
1779  };
1780 
1782  {
1784 
1785 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1788  : triangles( triangles_ )
1789  , aabbs( aabbs_ )
1790  {
1791  }
1792 
1794 
1795  GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) ) {}
1796 
1797  GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
1798 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1799 
1801  {
1802  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
1803  return *this;
1804  }
1805 
1806 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1808  {
1809  triangles = triangles_;
1810  return *this;
1811  }
1812 
1814  {
1815  aabbs = aabbs_;
1816  return *this;
1817  }
1818 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1819 
1820  operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
1821  {
1822  return *reinterpret_cast<const VkGeometryDataNV *>( this );
1823  }
1824 
1826  {
1827  return *reinterpret_cast<VkGeometryDataNV *>( this );
1828  }
1829 
1830 #if defined( VULKAN_HPP_USE_REFLECT )
1831 # if 14 <= VULKAN_HPP_CPP_VERSION
1832  auto
1833 # else
1834  std::tuple<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const &, VULKAN_HPP_NAMESPACE::GeometryAABBNV const &>
1835 # endif
1836  reflect() const VULKAN_HPP_NOEXCEPT
1837  {
1838  return std::tie( triangles, aabbs );
1839  }
1840 #endif
1841 
1842 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1843  auto operator<=>( GeometryDataNV const & ) const = default;
1844 #else
1846  {
1847 # if defined( VULKAN_HPP_USE_REFLECT )
1848  return this->reflect() == rhs.reflect();
1849 # else
1850  return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
1851 # endif
1852  }
1853 
1855  {
1856  return !operator==( rhs );
1857  }
1858 #endif
1859 
1860  public:
1863  };
1864 
1865  struct GeometryNV
1866  {
1868 
1869  static const bool allowDuplicate = false;
1871 
1872 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1874  VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
1876  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1877  : pNext( pNext_ )
1878  , geometryType( geometryType_ )
1879  , geometry( geometry_ )
1880  , flags( flags_ )
1881  {
1882  }
1883 
1885 
1886  GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) ) {}
1887 
1888  GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
1889 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
1890 
1892  {
1893  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
1894  return *this;
1895  }
1896 
1897 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
1899  {
1900  pNext = pNext_;
1901  return *this;
1902  }
1903 
1905  {
1906  geometryType = geometryType_;
1907  return *this;
1908  }
1909 
1911  {
1912  geometry = geometry_;
1913  return *this;
1914  }
1915 
1917  {
1918  flags = flags_;
1919  return *this;
1920  }
1921 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
1922 
1923  operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
1924  {
1925  return *reinterpret_cast<const VkGeometryNV *>( this );
1926  }
1927 
1929  {
1930  return *reinterpret_cast<VkGeometryNV *>( this );
1931  }
1932 
1933 #if defined( VULKAN_HPP_USE_REFLECT )
1934 # if 14 <= VULKAN_HPP_CPP_VERSION
1935  auto
1936 # else
1937  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
1938  const void * const &,
1942 # endif
1943  reflect() const VULKAN_HPP_NOEXCEPT
1944  {
1945  return std::tie( sType, pNext, geometryType, geometry, flags );
1946  }
1947 #endif
1948 
1949 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
1950  auto operator<=>( GeometryNV const & ) const = default;
1951 #else
1952  bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
1953  {
1954 # if defined( VULKAN_HPP_USE_REFLECT )
1955  return this->reflect() == rhs.reflect();
1956 # else
1957  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags );
1958 # endif
1959  }
1960 
1961  bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
1962  {
1963  return !operator==( rhs );
1964  }
1965 #endif
1966 
1967  public:
1969  const void * pNext = {};
1973  };
1974 
1975  template <>
1977  {
1978  using Type = GeometryNV;
1979  };
1980 
1982  {
1984 
1985  static const bool allowDuplicate = false;
1987 
1988 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
1991  uint32_t instanceCount_ = {},
1992  uint32_t geometryCount_ = {},
1993  const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {},
1994  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
1995  : pNext( pNext_ )
1996  , type( type_ )
1997  , flags( flags_ )
1998  , instanceCount( instanceCount_ )
1999  , geometryCount( geometryCount_ )
2000  , pGeometries( pGeometries_ )
2001  {
2002  }
2003 
2005 
2007  : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
2008  {
2009  }
2010 
2011 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
2014  uint32_t instanceCount_,
2016  const void * pNext_ = nullptr )
2017  : pNext( pNext_ )
2018  , type( type_ )
2019  , flags( flags_ )
2020  , instanceCount( instanceCount_ )
2021  , geometryCount( static_cast<uint32_t>( geometries_.size() ) )
2022  , pGeometries( geometries_.data() )
2023  {
2024  }
2025 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
2026 
2028 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2029 
2031  {
2032  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
2033  return *this;
2034  }
2035 
2036 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2038  {
2039  pNext = pNext_;
2040  return *this;
2041  }
2042 
2044  {
2045  type = type_;
2046  return *this;
2047  }
2048 
2050  {
2051  flags = flags_;
2052  return *this;
2053  }
2054 
2056  {
2057  instanceCount = instanceCount_;
2058  return *this;
2059  }
2060 
2062  {
2063  geometryCount = geometryCount_;
2064  return *this;
2065  }
2066 
2068  {
2069  pGeometries = pGeometries_;
2070  return *this;
2071  }
2072 
2073 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
2076  {
2077  geometryCount = static_cast<uint32_t>( geometries_.size() );
2078  pGeometries = geometries_.data();
2079  return *this;
2080  }
2081 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
2082 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2083 
2085  {
2086  return *reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
2087  }
2088 
2090  {
2091  return *reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
2092  }
2093 
2094 #if defined( VULKAN_HPP_USE_REFLECT )
2095 # if 14 <= VULKAN_HPP_CPP_VERSION
2096  auto
2097 # else
2098  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
2099  const void * const &,
2102  uint32_t const &,
2103  uint32_t const &,
2104  const VULKAN_HPP_NAMESPACE::GeometryNV * const &>
2105 # endif
2106  reflect() const VULKAN_HPP_NOEXCEPT
2107  {
2108  return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries );
2109  }
2110 #endif
2111 
2112 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2113  auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
2114 #else
2116  {
2117 # if defined( VULKAN_HPP_USE_REFLECT )
2118  return this->reflect() == rhs.reflect();
2119 # else
2120  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) &&
2121  ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries );
2122 # endif
2123  }
2124 
2126  {
2127  return !operator==( rhs );
2128  }
2129 #endif
2130 
2131  public:
2133  const void * pNext = {};
2136  uint32_t instanceCount = {};
2137  uint32_t geometryCount = {};
2139  };
2140 
2141  template <>
2143  {
2145  };
2146 
2148  {
2150 
2151  static const bool allowDuplicate = false;
2153 
2154 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2157  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
2158  : pNext( pNext_ )
2159  , compactedSize( compactedSize_ )
2160  , info( info_ )
2161  {
2162  }
2163 
2165 
2167  : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
2168  {
2169  }
2170 
2172 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2173 
2175  {
2176  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
2177  return *this;
2178  }
2179 
2180 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2182  {
2183  pNext = pNext_;
2184  return *this;
2185  }
2186 
2188  {
2189  compactedSize = compactedSize_;
2190  return *this;
2191  }
2192 
2194  {
2195  info = info_;
2196  return *this;
2197  }
2198 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2199 
2201  {
2202  return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
2203  }
2204 
2206  {
2207  return *reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
2208  }
2209 
2210 #if defined( VULKAN_HPP_USE_REFLECT )
2211 # if 14 <= VULKAN_HPP_CPP_VERSION
2212  auto
2213 # else
2214  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
2215  const void * const &,
2218 # endif
2219  reflect() const VULKAN_HPP_NOEXCEPT
2220  {
2221  return std::tie( sType, pNext, compactedSize, info );
2222  }
2223 #endif
2224 
2225 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2226  auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
2227 #else
2229  {
2230 # if defined( VULKAN_HPP_USE_REFLECT )
2231  return this->reflect() == rhs.reflect();
2232 # else
2233  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info );
2234 # endif
2235  }
2236 
2238  {
2239  return !operator==( rhs );
2240  }
2241 #endif
2242 
2243  public:
2245  const void * pNext = {};
2248  };
2249 
2250  template <>
2252  {
2254  };
2255 
2257  {
2259 
2260  static const bool allowDuplicate = false;
2262 
2263 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2265  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
2266  : pNext( pNext_ )
2267  , accelerationStructure( accelerationStructure_ )
2268  {
2269  }
2270 
2272 
2274  : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
2275  {
2276  }
2277 
2279 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2280 
2282  {
2283  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
2284  return *this;
2285  }
2286 
2287 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2289  {
2290  pNext = pNext_;
2291  return *this;
2292  }
2293 
2296  {
2297  accelerationStructure = accelerationStructure_;
2298  return *this;
2299  }
2300 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2301 
2303  {
2304  return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
2305  }
2306 
2308  {
2309  return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
2310  }
2311 
2312 #if defined( VULKAN_HPP_USE_REFLECT )
2313 # if 14 <= VULKAN_HPP_CPP_VERSION
2314  auto
2315 # else
2316  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
2317 # endif
2318  reflect() const VULKAN_HPP_NOEXCEPT
2319  {
2320  return std::tie( sType, pNext, accelerationStructure );
2321  }
2322 #endif
2323 
2324 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2325  auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
2326 #else
2328  {
2329 # if defined( VULKAN_HPP_USE_REFLECT )
2330  return this->reflect() == rhs.reflect();
2331 # else
2332  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure );
2333 # endif
2334  }
2335 
2337  {
2338  return !operator==( rhs );
2339  }
2340 #endif
2341 
2342  public:
2344  const void * pNext = {};
2346  };
2347 
2348  template <>
2350  {
2352  };
2353 
2355  {
2357 
2358  static const bool allowDuplicate = false;
2360 
2361 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2363  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
2364  : pNext( pNext_ )
2365  , vertexData( vertexData_ )
2366  {
2367  }
2368 
2371 
2373  : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
2374  {
2375  }
2376 
2379 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2380 
2382  {
2383  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
2384  return *this;
2385  }
2386 
2387 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2389  {
2390  pNext = pNext_;
2391  return *this;
2392  }
2393 
2396  {
2397  vertexData = vertexData_;
2398  return *this;
2399  }
2400 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2401 
2403  {
2404  return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
2405  }
2406 
2408  {
2409  return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
2410  }
2411 
2412 #if defined( VULKAN_HPP_USE_REFLECT )
2413 # if 14 <= VULKAN_HPP_CPP_VERSION
2414  auto
2415 # else
2416  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
2417 # endif
2418  reflect() const VULKAN_HPP_NOEXCEPT
2419  {
2420  return std::tie( sType, pNext, vertexData );
2421  }
2422 #endif
2423 
2424  public:
2426  const void * pNext = {};
2428  };
2429 
2430  template <>
2432  {
2434  };
2435 
  struct TransformMatrixKHR
  {
    using NativeType = VkTransformMatrixKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {}

    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) ) {}

    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
    {
      matrix = matrix_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
    }

    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTransformMatrixKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( matrix );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TransformMatrixKHR const & ) const = default;
#else
    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( matrix == rhs.matrix );
# endif
    }

    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
  };
  using TransformMatrixNV = TransformMatrixKHR;

2510  {
2512 
2513 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2515  uint32_t instanceCustomIndex_ = {},
2516  uint32_t mask_ = {},
2517  uint32_t instanceShaderBindingTableRecordOffset_ = {},
2519  uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
2520  : transform( transform_ )
2521  , instanceCustomIndex( instanceCustomIndex_ )
2522  , mask( mask_ )
2523  , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
2524  , flags( flags_ )
2525  , accelerationStructureReference( accelerationStructureReference_ )
2526  {
2527  }
2528 
2530 
2532  : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
2533  {
2534  }
2535 
2537 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2538 
2540  {
2541  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
2542  return *this;
2543  }
2544 
2545 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2547  {
2548  transform = transform_;
2549  return *this;
2550  }
2551 
2553  {
2554  instanceCustomIndex = instanceCustomIndex_;
2555  return *this;
2556  }
2557 
2559  {
2560  mask = mask_;
2561  return *this;
2562  }
2563 
2565  setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
2566  {
2567  instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
2568  return *this;
2569  }
2570 
2572  {
2573  flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
2574  return *this;
2575  }
2576 
2578  {
2579  accelerationStructureReference = accelerationStructureReference_;
2580  return *this;
2581  }
2582 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2583 
2585  {
2586  return *reinterpret_cast<const VkAccelerationStructureInstanceKHR *>( this );
2587  }
2588 
2590  {
2591  return *reinterpret_cast<VkAccelerationStructureInstanceKHR *>( this );
2592  }
2593 
2594 #if defined( VULKAN_HPP_USE_REFLECT )
2595 # if 14 <= VULKAN_HPP_CPP_VERSION
2596  auto
2597 # else
2598  std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &,
2599  uint32_t const &,
2600  uint32_t const &,
2601  uint32_t const &,
2603  uint64_t const &>
2604 # endif
2605  reflect() const VULKAN_HPP_NOEXCEPT
2606  {
2608  }
2609 #endif
2610 
2611 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2612  auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
2613 #else
2615  {
2616 # if defined( VULKAN_HPP_USE_REFLECT )
2617  return this->reflect() == rhs.reflect();
2618 # else
2619  return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) &&
2620  ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
2621  ( accelerationStructureReference == rhs.accelerationStructureReference );
2622 # endif
2623  }
2624 
2626  {
2627  return !operator==( rhs );
2628  }
2629 #endif
2630 
2631  public:
2633  uint32_t instanceCustomIndex : 24;
2634  uint32_t mask : 8;
2638  };
2639  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
2640 
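  // Usage sketch (illustration only, not part of the generated header): a top-level acceleration
  // structure instance references a bottom-level structure through a 64-bit device address (or
  // handle) and positions it with a row-major 3x4 transform.  The identity transform, mask value
  // and function name are assumptions made for the example.
  inline AccelerationStructureInstanceKHR exampleIdentityInstance( uint64_t blasReference )
  {
    std::array<std::array<float, 4>, 3> identity = { { { { 1.0f, 0.0f, 0.0f, 0.0f } },
                                                       { { 0.0f, 1.0f, 0.0f, 0.0f } },
                                                       { { 0.0f, 0.0f, 1.0f, 0.0f } } } };

    return AccelerationStructureInstanceKHR()
      .setTransform( TransformMatrixKHR( identity ) )
      .setInstanceCustomIndex( 0 )
      .setMask( 0xFF )  // visible to all ray masks
      .setInstanceShaderBindingTableRecordOffset( 0 )
      .setFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
      .setAccelerationStructureReference( blasReference );
  }
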
2642  {
2644 
2645 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2647  VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {},
2648  uint32_t instanceCustomIndex_ = {},
2649  uint32_t mask_ = {},
2650  uint32_t instanceShaderBindingTableRecordOffset_ = {},
2652  uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
2653  : transformT0( transformT0_ )
2654  , transformT1( transformT1_ )
2655  , instanceCustomIndex( instanceCustomIndex_ )
2656  , mask( mask_ )
2657  , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
2658  , flags( flags_ )
2659  , accelerationStructureReference( accelerationStructureReference_ )
2660  {
2661  }
2662 
2665 
2667  : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
2668  {
2669  }
2670 
2672 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2673 
2675  {
2676  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
2677  return *this;
2678  }
2679 
2680 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2683  {
2684  transformT0 = transformT0_;
2685  return *this;
2686  }
2687 
2690  {
2691  transformT1 = transformT1_;
2692  return *this;
2693  }
2694 
2696  {
2697  instanceCustomIndex = instanceCustomIndex_;
2698  return *this;
2699  }
2700 
2702  {
2703  mask = mask_;
2704  return *this;
2705  }
2706 
2708  setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
2709  {
2710  instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
2711  return *this;
2712  }
2713 
2715  {
2716  flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
2717  return *this;
2718  }
2719 
2721  setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
2722  {
2723  accelerationStructureReference = accelerationStructureReference_;
2724  return *this;
2725  }
2726 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2727 
2729  {
2730  return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV *>( this );
2731  }
2732 
2734  {
2735  return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV *>( this );
2736  }
2737 
2738 #if defined( VULKAN_HPP_USE_REFLECT )
2739 # if 14 <= VULKAN_HPP_CPP_VERSION
2740  auto
2741 # else
2742  std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &,
2744  uint32_t const &,
2745  uint32_t const &,
2746  uint32_t const &,
2748  uint64_t const &>
2749 # endif
2750  reflect() const VULKAN_HPP_NOEXCEPT
2751  {
2753  }
2754 #endif
2755 
2756 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2757  auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default;
2758 #else
2760  {
2761 # if defined( VULKAN_HPP_USE_REFLECT )
2762  return this->reflect() == rhs.reflect();
2763 # else
2764  return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
2765  ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
2766  ( accelerationStructureReference == rhs.accelerationStructureReference );
2767 # endif
2768  }
2769 
2771  {
2772  return !operator==( rhs );
2773  }
2774 #endif
2775 
2776  public:
2779  uint32_t instanceCustomIndex : 24;
2780  uint32_t mask : 8;
2784  };
2785 
2787  {
2789 
2790  static const bool allowDuplicate = false;
2792 
2793 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2796  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
2797  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
2798  : pNext( pNext_ )
2799  , type( type_ )
2800  , accelerationStructure( accelerationStructure_ )
2801  {
2802  }
2803 
2806 
2808  : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
2809  {
2810  }
2811 
2813 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2814 
2816  {
2817  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
2818  return *this;
2819  }
2820 
2821 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2823  {
2824  pNext = pNext_;
2825  return *this;
2826  }
2827 
2830  {
2831  type = type_;
2832  return *this;
2833  }
2834 
2837  {
2838  accelerationStructure = accelerationStructure_;
2839  return *this;
2840  }
2841 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2842 
2844  {
2845  return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
2846  }
2847 
2849  {
2850  return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
2851  }
2852 
2853 #if defined( VULKAN_HPP_USE_REFLECT )
2854 # if 14 <= VULKAN_HPP_CPP_VERSION
2855  auto
2856 # else
2857  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
2858  const void * const &,
2861 # endif
2862  reflect() const VULKAN_HPP_NOEXCEPT
2863  {
2864  return std::tie( sType, pNext, type, accelerationStructure );
2865  }
2866 #endif
2867 
2868 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2869  auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
2870 #else
2872  {
2873 # if defined( VULKAN_HPP_USE_REFLECT )
2874  return this->reflect() == rhs.reflect();
2875 # else
2876  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure );
2877 # endif
2878  }
2879 
2881  {
2882  return !operator==( rhs );
2883  }
2884 #endif
2885 
2886  public:
2887  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
2888  const void * pNext = {};
2889  VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
2890  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
2891  };
2892 
2893  template <>
2895  {
2897  };
2898 
2899  struct AccelerationStructureMotionInfoNV
2900  {
2901  using NativeType = VkAccelerationStructureMotionInfoNV;
2902 
2903  static const bool allowDuplicate = false;
2904  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV;
2905 
2906 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
2909  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
2910  : pNext( pNext_ )
2911  , maxInstances( maxInstances_ )
2912  , flags( flags_ )
2913  {
2914  }
2915 
2917 
2919  : AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
2920  {
2921  }
2922 
2924 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
2925 
2927  {
2928  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const *>( &rhs );
2929  return *this;
2930  }
2931 
2932 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
2934  {
2935  pNext = pNext_;
2936  return *this;
2937  }
2938 
2940  {
2941  maxInstances = maxInstances_;
2942  return *this;
2943  }
2944 
2947  {
2948  flags = flags_;
2949  return *this;
2950  }
2951 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
2952 
2954  {
2955  return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV *>( this );
2956  }
2957 
2959  {
2960  return *reinterpret_cast<VkAccelerationStructureMotionInfoNV *>( this );
2961  }
2962 
2963 #if defined( VULKAN_HPP_USE_REFLECT )
2964 # if 14 <= VULKAN_HPP_CPP_VERSION
2965  auto
2966 # else
2967  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
2968  const void * const &,
2969  uint32_t const &,
2971 # endif
2972  reflect() const VULKAN_HPP_NOEXCEPT
2973  {
2974  return std::tie( sType, pNext, maxInstances, flags );
2975  }
2976 #endif
2977 
2978 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
2979  auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default;
2980 #else
2982  {
2983 # if defined( VULKAN_HPP_USE_REFLECT )
2984  return this->reflect() == rhs.reflect();
2985 # else
2986  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags );
2987 # endif
2988  }
2989 
2991  {
2992  return !operator==( rhs );
2993  }
2994 #endif
2995 
2996  public:
2997  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
2998  const void * pNext = {};
2999  uint32_t maxInstances = {};
3000  VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
3001  };
3002 
3003  template <>
3005  {
3007  };
3008 
3009  struct SRTDataNV
3010  {
3011  using NativeType = VkSRTDataNV;
3012 
3013 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3015  float a_ = {},
3016  float b_ = {},
3017  float pvx_ = {},
3018  float sy_ = {},
3019  float c_ = {},
3020  float pvy_ = {},
3021  float sz_ = {},
3022  float pvz_ = {},
3023  float qx_ = {},
3024  float qy_ = {},
3025  float qz_ = {},
3026  float qw_ = {},
3027  float tx_ = {},
3028  float ty_ = {},
3029  float tz_ = {} ) VULKAN_HPP_NOEXCEPT
3030  : sx( sx_ )
3031  , a( a_ )
3032  , b( b_ )
3033  , pvx( pvx_ )
3034  , sy( sy_ )
3035  , c( c_ )
3036  , pvy( pvy_ )
3037  , sz( sz_ )
3038  , pvz( pvz_ )
3039  , qx( qx_ )
3040  , qy( qy_ )
3041  , qz( qz_ )
3042  , qw( qw_ )
3043  , tx( tx_ )
3044  , ty( ty_ )
3045  , tz( tz_ )
3046  {
3047  }
3048 
3050 
3051  SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) ) {}
3052 
3053  SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
3054 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3055 
3057  {
3058  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SRTDataNV const *>( &rhs );
3059  return *this;
3060  }
3061 
3062 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3064  {
3065  sx = sx_;
3066  return *this;
3067  }
3068 
3070  {
3071  a = a_;
3072  return *this;
3073  }
3074 
3076  {
3077  b = b_;
3078  return *this;
3079  }
3080 
3082  {
3083  pvx = pvx_;
3084  return *this;
3085  }
3086 
3088  {
3089  sy = sy_;
3090  return *this;
3091  }
3092 
3094  {
3095  c = c_;
3096  return *this;
3097  }
3098 
3100  {
3101  pvy = pvy_;
3102  return *this;
3103  }
3104 
3106  {
3107  sz = sz_;
3108  return *this;
3109  }
3110 
3112  {
3113  pvz = pvz_;
3114  return *this;
3115  }
3116 
3118  {
3119  qx = qx_;
3120  return *this;
3121  }
3122 
3124  {
3125  qy = qy_;
3126  return *this;
3127  }
3128 
3130  {
3131  qz = qz_;
3132  return *this;
3133  }
3134 
3136  {
3137  qw = qw_;
3138  return *this;
3139  }
3140 
3142  {
3143  tx = tx_;
3144  return *this;
3145  }
3146 
3148  {
3149  ty = ty_;
3150  return *this;
3151  }
3152 
3154  {
3155  tz = tz_;
3156  return *this;
3157  }
3158 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3159 
3160  operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
3161  {
3162  return *reinterpret_cast<const VkSRTDataNV *>( this );
3163  }
3164 
3166  {
3167  return *reinterpret_cast<VkSRTDataNV *>( this );
3168  }
3169 
3170 #if defined( VULKAN_HPP_USE_REFLECT )
3171 # if 14 <= VULKAN_HPP_CPP_VERSION
3172  auto
3173 # else
3174  std::tuple<float const &,
3175  float const &,
3176  float const &,
3177  float const &,
3178  float const &,
3179  float const &,
3180  float const &,
3181  float const &,
3182  float const &,
3183  float const &,
3184  float const &,
3185  float const &,
3186  float const &,
3187  float const &,
3188  float const &,
3189  float const &>
3190 # endif
3191  reflect() const VULKAN_HPP_NOEXCEPT
3192  {
3193  return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz );
3194  }
3195 #endif
3196 
3197 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
3198  auto operator<=>( SRTDataNV const & ) const = default;
3199 #else
3200  bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
3201  {
3202 # if defined( VULKAN_HPP_USE_REFLECT )
3203  return this->reflect() == rhs.reflect();
3204 # else
3205  return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) &&
3206  ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) &&
3207  ( ty == rhs.ty ) && ( tz == rhs.tz );
3208 # endif
3209  }
3210 
3211  bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
3212  {
3213  return !operator==( rhs );
3214  }
3215 #endif
3216 
3217  public:
3218  float sx = {};
3219  float a = {};
3220  float b = {};
3221  float pvx = {};
3222  float sy = {};
3223  float c = {};
3224  float pvy = {};
3225  float sz = {};
3226  float pvz = {};
3227  float qx = {};
3228  float qy = {};
3229  float qz = {};
3230  float qw = {};
3231  float tx = {};
3232  float ty = {};
3233  float tz = {};
3234  };
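 // Editor's sketch (not part of the generated header): SRTDataNV packs a scale/shear block
 // (sx..pvz), a rotation quaternion (qx..qw) and a translation (tx..tz). A minimal identity
 // transform, which leaves geometry in place, looks like this:
 //
 //   VULKAN_HPP_NAMESPACE::SRTDataNV identitySrt{};
 //   identitySrt.setSx( 1.0f ).setSy( 1.0f ).setSz( 1.0f ).setQw( 1.0f );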
3235 
3236  struct AccelerationStructureSRTMotionInstanceNV
3237  {
3238  using NativeType = VkAccelerationStructureSRTMotionInstanceNV;
3239 
3240 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3242  VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {},
3243  uint32_t instanceCustomIndex_ = {},
3244  uint32_t mask_ = {},
3245  uint32_t instanceShaderBindingTableRecordOffset_ = {},
3247  uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
3248  : transformT0( transformT0_ )
3249  , transformT1( transformT1_ )
3250  , instanceCustomIndex( instanceCustomIndex_ )
3251  , mask( mask_ )
3252  , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
3253  , flags( flags_ )
3254  , accelerationStructureReference( accelerationStructureReference_ )
3255  {
3256  }
3257 
3259 
3261  : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
3262  {
3263  }
3264 
3266 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3267 
3269  {
3270  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
3271  return *this;
3272  }
3273 
3274 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3277  {
3278  transformT0 = transformT0_;
3279  return *this;
3280  }
3281 
3284  {
3285  transformT1 = transformT1_;
3286  return *this;
3287  }
3288 
3290  {
3291  instanceCustomIndex = instanceCustomIndex_;
3292  return *this;
3293  }
3294 
3296  {
3297  mask = mask_;
3298  return *this;
3299  }
3300 
3302  setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
3303  {
3304  instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
3305  return *this;
3306  }
3307 
3309  {
3310  flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
3311  return *this;
3312  }
3313 
3315  setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
3316  {
3317  accelerationStructureReference = accelerationStructureReference_;
3318  return *this;
3319  }
3320 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3321 
3323  {
3324  return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV *>( this );
3325  }
3326 
3328  {
3329  return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV *>( this );
3330  }
3331 
3332 #if defined( VULKAN_HPP_USE_REFLECT )
3333 # if 14 <= VULKAN_HPP_CPP_VERSION
3334  auto
3335 # else
3336  std::tuple<VULKAN_HPP_NAMESPACE::SRTDataNV const &,
3337  VULKAN_HPP_NAMESPACE::SRTDataNV const &,
3338  uint32_t const &,
3339  uint32_t const &,
3340  uint32_t const &,
3341  VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR const &,
3342  uint64_t const &>
3343 # endif
3344  reflect() const VULKAN_HPP_NOEXCEPT
3345  {
3346  return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
3347  }
3348 #endif
3349 
3350 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
3351  auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default;
3352 #else
3354  {
3355 # if defined( VULKAN_HPP_USE_REFLECT )
3356  return this->reflect() == rhs.reflect();
3357 # else
3358  return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
3359  ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
3360  ( accelerationStructureReference == rhs.accelerationStructureReference );
3361 # endif
3362  }
3363 
3365  {
3366  return !operator==( rhs );
3367  }
3368 #endif
3369 
3370  public:
3371  VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {};
3372  VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {};
3373  uint32_t instanceCustomIndex : 24;
3374  uint32_t mask : 8;
3375  uint32_t instanceShaderBindingTableRecordOffset : 24;
3376  VkGeometryInstanceFlagsKHR flags : 8;
3377  uint64_t accelerationStructureReference = {};
3378  };
3379 
3380  union AccelerationStructureMotionInstanceDataNV
3381  {
3382  using NativeType = VkAccelerationStructureMotionInstanceDataNV;
3383 #if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
3384 
3386  : staticInstance( staticInstance_ )
3387  {
3388  }
3389 
3391  : matrixMotionInstance( matrixMotionInstance_ )
3392  {
3393  }
3394 
3396  : srtMotionInstance( srtMotionInstance_ )
3397  {
3398  }
3399 #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
3400 
3401 #if !defined( VULKAN_HPP_NO_UNION_SETTERS )
3404  {
3405  staticInstance = staticInstance_;
3406  return *this;
3407  }
3408 
3411  {
3412  matrixMotionInstance = matrixMotionInstance_;
3413  return *this;
3414  }
3415 
3418  {
3419  srtMotionInstance = srtMotionInstance_;
3420  return *this;
3421  }
3422 #endif /*VULKAN_HPP_NO_UNION_SETTERS*/
3423 
3425  {
3426  return *reinterpret_cast<const VkAccelerationStructureMotionInstanceDataNV *>( this );
3427  }
3428 
3430  {
3431  return *reinterpret_cast<VkAccelerationStructureMotionInstanceDataNV *>( this );
3432  }
3433 
3434 #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3435  VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance;
3436  VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
3437  VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance;
3438 #else
3439  VkAccelerationStructureInstanceKHR staticInstance;
3440  VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
3441  VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance;
3442 #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
3443  };
3444 
3445  struct AccelerationStructureMotionInstanceNV
3446  {
3447  using NativeType = VkAccelerationStructureMotionInstanceNV;
3448 
3449 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3454  : type( type_ )
3455  , flags( flags_ )
3456  , data( data_ )
3457  {
3458  }
3459 
3461 
3463  : AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
3464  {
3465  }
3466 
3468 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3469 
3471  {
3472  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
3473  return *this;
3474  }
3475 
3476 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3479  {
3480  type = type_;
3481  return *this;
3482  }
3483 
3486  {
3487  flags = flags_;
3488  return *this;
3489  }
3490 
3493  {
3494  data = data_;
3495  return *this;
3496  }
3497 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3498 
3500  {
3501  return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV *>( this );
3502  }
3503 
3505  {
3506  return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV *>( this );
3507  }
3508 
3509 #if defined( VULKAN_HPP_USE_REFLECT )
3510 # if 14 <= VULKAN_HPP_CPP_VERSION
3511  auto
3512 # else
3516 # endif
3517  reflect() const VULKAN_HPP_NOEXCEPT
3518  {
3519  return std::tie( type, flags, data );
3520  }
3521 #endif
3522 
3523  public:
3524  VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
3525  VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
3526  VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
3527  };
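 // Editor's sketch (not part of the generated header): combining the pieces above into a
 // matrix-motion instance; `motionInstance` is the AccelerationStructureMatrixMotionInstanceNV
 // filled in earlier:
 //
 //   VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV instance(
 //     VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eMatrixMotion,
 //     {},
 //     VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV( motionInstance ) );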
3528 
3529  struct MicromapUsageEXT
3530  {
3531  using NativeType = VkMicromapUsageEXT;
3532 
3533 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3534  VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
3535  : count( count_ )
3536  , subdivisionLevel( subdivisionLevel_ )
3537  , format( format_ )
3538  {
3539  }
3540 
3542 
3543  MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) ) {}
3544 
3546 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3547 
3549  {
3550  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
3551  return *this;
3552  }
3553 
3554 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3556  {
3557  count = count_;
3558  return *this;
3559  }
3560 
3562  {
3563  subdivisionLevel = subdivisionLevel_;
3564  return *this;
3565  }
3566 
3568  {
3569  format = format_;
3570  return *this;
3571  }
3572 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3573 
3574  operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
3575  {
3576  return *reinterpret_cast<const VkMicromapUsageEXT *>( this );
3577  }
3578 
3580  {
3581  return *reinterpret_cast<VkMicromapUsageEXT *>( this );
3582  }
3583 
3584 #if defined( VULKAN_HPP_USE_REFLECT )
3585 # if 14 <= VULKAN_HPP_CPP_VERSION
3586  auto
3587 # else
3588  std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
3589 # endif
3590  reflect() const VULKAN_HPP_NOEXCEPT
3591  {
3592  return std::tie( count, subdivisionLevel, format );
3593  }
3594 #endif
3595 
3596 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
3597  auto operator<=>( MicromapUsageEXT const & ) const = default;
3598 #else
3600  {
3601 # if defined( VULKAN_HPP_USE_REFLECT )
3602  return this->reflect() == rhs.reflect();
3603 # else
3604  return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
3605 # endif
3606  }
3607 
3609  {
3610  return !operator==( rhs );
3611  }
3612 #endif
3613 
3614  public:
3615  uint32_t count = {};
3616  uint32_t subdivisionLevel = {};
3617  uint32_t format = {};
3618  };
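 // Editor's sketch (not part of the generated header): a MicromapUsageEXT describing 64 triangles
 // at subdivision level 3 with the 4-state opacity format; since the format member is a plain
 // uint32_t, the VK_EXT_opacity_micromap enum value is cast explicitly:
 //
 //   VULKAN_HPP_NAMESPACE::MicromapUsageEXT usage;
 //   usage.setCount( 64 )
 //     .setSubdivisionLevel( 3 )
 //     .setFormat( static_cast<uint32_t>( VULKAN_HPP_NAMESPACE::OpacityMicromapFormatEXT::e4State ) );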
3619 
3620  struct AccelerationStructureTrianglesOpacityMicromapEXT
3621  {
3622  using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;
3623 
3624  static const bool allowDuplicate = false;
3625  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
3626 
3627 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3631  VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {},
3632  uint32_t baseTriangle_ = {},
3633  uint32_t usageCountsCount_ = {},
3634  const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
3635  const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
3636  VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
3637  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
3638  : pNext( pNext_ )
3639  , indexType( indexType_ )
3640  , indexBuffer( indexBuffer_ )
3641  , indexStride( indexStride_ )
3642  , baseTriangle( baseTriangle_ )
3643  , usageCountsCount( usageCountsCount_ )
3644  , pUsageCounts( pUsageCounts_ )
3645  , ppUsageCounts( ppUsageCounts_ )
3646  , micromap( micromap_ )
3647  {
3648  }
3649 
3652 
3654  : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
3655  {
3656  }
3657 
3658 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
3662  VULKAN_HPP_NAMESPACE::DeviceSize indexStride_,
3663  uint32_t baseTriangle_,
3666  VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
3667  void * pNext_ = nullptr )
3668  : pNext( pNext_ )
3669  , indexType( indexType_ )
3670  , indexBuffer( indexBuffer_ )
3671  , indexStride( indexStride_ )
3672  , baseTriangle( baseTriangle_ )
3673  , usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) )
3674  , pUsageCounts( usageCounts_.data() )
3675  , ppUsageCounts( pUsageCounts_.data() )
3676  , micromap( micromap_ )
3677  {
3678 # ifdef VULKAN_HPP_NO_EXCEPTIONS
3679  VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
3680 # else
3681  if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
3682  {
3683  throw LogicError(
3685  "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
3686  }
3687 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
3688  }
3689 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3690 
3692 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3693 
3695  {
3696  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
3697  return *this;
3698  }
3699 
3700 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3702  {
3703  pNext = pNext_;
3704  return *this;
3705  }
3706 
3708  {
3709  indexType = indexType_;
3710  return *this;
3711  }
3712 
3715  {
3716  indexBuffer = indexBuffer_;
3717  return *this;
3718  }
3719 
3722  {
3723  indexStride = indexStride_;
3724  return *this;
3725  }
3726 
3728  {
3729  baseTriangle = baseTriangle_;
3730  return *this;
3731  }
3732 
3734  {
3735  usageCountsCount = usageCountsCount_;
3736  return *this;
3737  }
3738 
3741  {
3742  pUsageCounts = pUsageCounts_;
3743  return *this;
3744  }
3745 
3746 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
3749  {
3750  usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
3751  pUsageCounts = usageCounts_.data();
3752  return *this;
3753  }
3754 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3755 
3758  {
3759  ppUsageCounts = ppUsageCounts_;
3760  return *this;
3761  }
3762 
3763 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
3766  {
3767  usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
3768  ppUsageCounts = pUsageCounts_.data();
3769  return *this;
3770  }
3771 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3772 
3774  {
3775  micromap = micromap_;
3776  return *this;
3777  }
3778 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3779 
3781  {
3782  return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
3783  }
3784 
3786  {
3787  return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
3788  }
3789 
3790 #if defined( VULKAN_HPP_USE_REFLECT )
3791 # if 14 <= VULKAN_HPP_CPP_VERSION
3792  auto
3793 # else
3794  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
3795  void * const &,
3799  uint32_t const &,
3800  uint32_t const &,
3802  const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
3804 # endif
3805  reflect() const VULKAN_HPP_NOEXCEPT
3806  {
3807  return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
3808  }
3809 #endif
3810 
3811  public:
3812  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
3813  void * pNext = {};
3814  VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
3815  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
3816  VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
3817  uint32_t baseTriangle = {};
3818  uint32_t usageCountsCount = {};
3819  const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
3820  const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
3821  VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
3822  };
3823 
3824  template <>
3826  {
3828  };
3829 
3830  struct AccelerationStructureVersionInfoKHR
3831  {
3832  using NativeType = VkAccelerationStructureVersionInfoKHR;
3833 
3834  static const bool allowDuplicate = false;
3835  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
3836 
3837 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3838  VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
3839  : pNext( pNext_ )
3840  , pVersionData( pVersionData_ )
3841  {
3842  }
3843 
3845 
3847  : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
3848  {
3849  }
3850 
3852 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3853 
3855  {
3856  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
3857  return *this;
3858  }
3859 
3860 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3862  {
3863  pNext = pNext_;
3864  return *this;
3865  }
3866 
3868  {
3869  pVersionData = pVersionData_;
3870  return *this;
3871  }
3872 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
3873 
3875  {
3876  return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
3877  }
3878 
3880  {
3881  return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
3882  }
3883 
3884 #if defined( VULKAN_HPP_USE_REFLECT )
3885 # if 14 <= VULKAN_HPP_CPP_VERSION
3886  auto
3887 # else
3888  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
3889 # endif
3890  reflect() const VULKAN_HPP_NOEXCEPT
3891  {
3892  return std::tie( sType, pNext, pVersionData );
3893  }
3894 #endif
3895 
3896 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
3897  auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
3898 #else
3900  {
3901 # if defined( VULKAN_HPP_USE_REFLECT )
3902  return this->reflect() == rhs.reflect();
3903 # else
3904  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
3905 # endif
3906  }
3907 
3909  {
3910  return !operator==( rhs );
3911  }
3912 #endif
3913 
3914  public:
3915  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
3916  const void * pNext = {};
3917  const uint8_t * pVersionData = {};
3918  };
3919 
3920  template <>
3921  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
3922  {
3923  using Type = AccelerationStructureVersionInfoKHR;
3924  };
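 // Editor's sketch (not part of the generated header): per the Vulkan spec, pVersionData must
 // point to 2 * VK_UUID_SIZE bytes of version/compatibility data. `serializedHeader` is assumed
 // to hold the first 2 * VK_UUID_SIZE bytes of a serialized acceleration structure:
 //
 //   auto versionInfo = VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR()
 //                        .setPVersionData( serializedHeader.data() );
 //   auto compatibility = device.getAccelerationStructureCompatibilityKHR( versionInfo );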
3925 
3926  struct AcquireNextImageInfoKHR
3927  {
3928  using NativeType = VkAcquireNextImageInfoKHR;
3929 
3930  static const bool allowDuplicate = false;
3931  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
3932 
3933 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
3935  uint64_t timeout_ = {},
3936  VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
3937  VULKAN_HPP_NAMESPACE::Fence fence_ = {},
3938  uint32_t deviceMask_ = {},
3939  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
3940  : pNext( pNext_ )
3941  , swapchain( swapchain_ )
3942  , timeout( timeout_ )
3943  , semaphore( semaphore_ )
3944  , fence( fence_ )
3945  , deviceMask( deviceMask_ )
3946  {
3947  }
3948 
3950 
3952  : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
3953  {
3954  }
3955 
3957 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
3958 
3960  {
3961  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
3962  return *this;
3963  }
3964 
3965 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
3967  {
3968  pNext = pNext_;
3969  return *this;
3970  }
3971 
3973  {
3974  swapchain = swapchain_;
3975  return *this;
3976  }
3977 
3979  {
3980  timeout = timeout_;
3981  return *this;
3982  }
3983 
3985  {
3986  semaphore = semaphore_;
3987  return *this;
3988  }
3989 
3991  {
3992  fence = fence_;
3993  return *this;
3994  }
3995 
3997  {
3998  deviceMask = deviceMask_;
3999  return *this;
4000  }
4001 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4002 
4004  {
4005  return *reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
4006  }
4007 
4009  {
4010  return *reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
4011  }
4012 
4013 #if defined( VULKAN_HPP_USE_REFLECT )
4014 # if 14 <= VULKAN_HPP_CPP_VERSION
4015  auto
4016 # else
4017  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
4018  const void * const &,
4019  VULKAN_HPP_NAMESPACE::SwapchainKHR const &,
4020  uint64_t const &,
4021  VULKAN_HPP_NAMESPACE::Semaphore const &,
4022  VULKAN_HPP_NAMESPACE::Fence const &,
4023  uint32_t const &>
4024 # endif
4025  reflect() const VULKAN_HPP_NOEXCEPT
4026  {
4027  return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
4028  }
4029 #endif
4030 
4031 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4032  auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
4033 #else
4035  {
4036 # if defined( VULKAN_HPP_USE_REFLECT )
4037  return this->reflect() == rhs.reflect();
4038 # else
4039  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) &&
4040  ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask );
4041 # endif
4042  }
4043 
4045  {
4046  return !operator==( rhs );
4047  }
4048 #endif
4049 
4050  public:
4051  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
4052  const void * pNext = {};
4053  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
4054  uint64_t timeout = {};
4055  VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
4056  VULKAN_HPP_NAMESPACE::Fence fence = {};
4057  uint32_t deviceMask = {};
4058  };
4059 
4060  template <>
4061  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
4062  {
4063  using Type = AcquireNextImageInfoKHR;
4064  };
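 // Editor's sketch (not part of the generated header): acquiring a swapchain image through the
 // *2KHR path (VK_KHR_device_group / Vulkan 1.1); `device`, `swapchain` and `imageAvailable`
 // are assumed to be valid handles created elsewhere:
 //
 //   auto acquireInfo = VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR()
 //                        .setSwapchain( swapchain )
 //                        .setTimeout( UINT64_MAX )
 //                        .setSemaphore( imageAvailable )
 //                        .setDeviceMask( 1 );
 //   uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;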
4065 
4066  struct AcquireProfilingLockInfoKHR
4067  {
4068  using NativeType = VkAcquireProfilingLockInfoKHR;
4069 
4070  static const bool allowDuplicate = false;
4071  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
4072 
4073 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4075  uint64_t timeout_ = {},
4076  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4077  : pNext( pNext_ )
4078  , flags( flags_ )
4079  , timeout( timeout_ )
4080  {
4081  }
4082 
4084 
4086  : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
4087  {
4088  }
4089 
4091 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4092 
4094  {
4095  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
4096  return *this;
4097  }
4098 
4099 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
4101  {
4102  pNext = pNext_;
4103  return *this;
4104  }
4105 
4107  {
4108  flags = flags_;
4109  return *this;
4110  }
4111 
4113  {
4114  timeout = timeout_;
4115  return *this;
4116  }
4117 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4118 
4120  {
4121  return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
4122  }
4123 
4125  {
4126  return *reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
4127  }
4128 
4129 #if defined( VULKAN_HPP_USE_REFLECT )
4130 # if 14 <= VULKAN_HPP_CPP_VERSION
4131  auto
4132 # else
4133  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
4134 # endif
4135  reflect() const VULKAN_HPP_NOEXCEPT
4136  {
4137  return std::tie( sType, pNext, flags, timeout );
4138  }
4139 #endif
4140 
4141 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4142  auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
4143 #else
4145  {
4146 # if defined( VULKAN_HPP_USE_REFLECT )
4147  return this->reflect() == rhs.reflect();
4148 # else
4149  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout );
4150 # endif
4151  }
4152 
4154  {
4155  return !operator==( rhs );
4156  }
4157 #endif
4158 
4159  public:
4160  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
4161  const void * pNext = {};
4162  VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
4163  uint64_t timeout = {};
4164  };
4165 
4166  template <>
4168  {
4170  };
4171 
4172  struct AllocationCallbacks
4173  {
4174  using NativeType = VkAllocationCallbacks;
4175 
4176 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4178  PFN_vkAllocationFunction pfnAllocation_ = {},
4179  PFN_vkReallocationFunction pfnReallocation_ = {},
4180  PFN_vkFreeFunction pfnFree_ = {},
4181  PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
4182  PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
4183  : pUserData( pUserData_ )
4184  , pfnAllocation( pfnAllocation_ )
4185  , pfnReallocation( pfnReallocation_ )
4186  , pfnFree( pfnFree_ )
4187  , pfnInternalAllocation( pfnInternalAllocation_ )
4188  , pfnInternalFree( pfnInternalFree_ )
4189  {
4190  }
4191 
4193 
4194  AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
4195  {
4196  }
4197 
4199 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4200 
4202  {
4203  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
4204  return *this;
4205  }
4206 
4207 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
4209  {
4210  pUserData = pUserData_;
4211  return *this;
4212  }
4213 
4215  {
4216  pfnAllocation = pfnAllocation_;
4217  return *this;
4218  }
4219 
4221  {
4222  pfnReallocation = pfnReallocation_;
4223  return *this;
4224  }
4225 
4227  {
4228  pfnFree = pfnFree_;
4229  return *this;
4230  }
4231 
4233  {
4234  pfnInternalAllocation = pfnInternalAllocation_;
4235  return *this;
4236  }
4237 
4239  {
4240  pfnInternalFree = pfnInternalFree_;
4241  return *this;
4242  }
4243 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4244 
4246  {
4247  return *reinterpret_cast<const VkAllocationCallbacks *>( this );
4248  }
4249 
4251  {
4252  return *reinterpret_cast<VkAllocationCallbacks *>( this );
4253  }
4254 
4255 #if defined( VULKAN_HPP_USE_REFLECT )
4256 # if 14 <= VULKAN_HPP_CPP_VERSION
4257  auto
4258 # else
4259  std::tuple<void * const &,
4260  PFN_vkAllocationFunction const &,
4261  PFN_vkReallocationFunction const &,
4262  PFN_vkFreeFunction const &,
4263  PFN_vkInternalAllocationNotification const &,
4264  PFN_vkInternalFreeNotification const &>
4265 # endif
4266  reflect() const VULKAN_HPP_NOEXCEPT
4267  {
4268  return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
4269  }
4270 #endif
4271 
4273  {
4274 #if defined( VULKAN_HPP_USE_REFLECT )
4275  return this->reflect() == rhs.reflect();
4276 #else
4277  return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) &&
4278  ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
4279 #endif
4280  }
4281 
4283  {
4284  return !operator==( rhs );
4285  }
4286 
4287  public:
4288  void * pUserData = {};
4289  PFN_vkAllocationFunction pfnAllocation = {};
4290  PFN_vkReallocationFunction pfnReallocation = {};
4291  PFN_vkFreeFunction pfnFree = {};
4292  PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
4293  PFN_vkInternalFreeNotification pfnInternalFree = {};
4294  };
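 // Editor's sketch (not part of the generated header): wiring AllocationCallbacks to
 // hypothetical host-memory hooks `myAlloc`, `myRealloc` and `myFree` that match the
 // PFN_vkAllocationFunction / PFN_vkReallocationFunction / PFN_vkFreeFunction signatures;
 // `instanceCreateInfo` is assumed to be set up elsewhere:
 //
 //   VULKAN_HPP_NAMESPACE::AllocationCallbacks callbacks( /*pUserData=*/nullptr, myAlloc, myRealloc, myFree );
 //   auto instance = VULKAN_HPP_NAMESPACE::createInstance( instanceCreateInfo, callbacks );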
4295 
4296  struct AmigoProfilingSubmitInfoSEC
4297  {
4298  using NativeType = VkAmigoProfilingSubmitInfoSEC;
4299 
4300  static const bool allowDuplicate = false;
4301  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC;
4302 
4303 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4305  AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4306  : pNext( pNext_ )
4307  , firstDrawTimestamp( firstDrawTimestamp_ )
4308  , swapBufferTimestamp( swapBufferTimestamp_ )
4309  {
4310  }
4311 
4313 
4315  : AmigoProfilingSubmitInfoSEC( *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs ) )
4316  {
4317  }
4318 
4320 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4321 
4323  {
4324  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const *>( &rhs );
4325  return *this;
4326  }
4327 
4328 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
4330  {
4331  pNext = pNext_;
4332  return *this;
4333  }
4334 
4336  {
4337  firstDrawTimestamp = firstDrawTimestamp_;
4338  return *this;
4339  }
4340 
4342  {
4343  swapBufferTimestamp = swapBufferTimestamp_;
4344  return *this;
4345  }
4346 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4347 
4349  {
4350  return *reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC *>( this );
4351  }
4352 
4354  {
4355  return *reinterpret_cast<VkAmigoProfilingSubmitInfoSEC *>( this );
4356  }
4357 
4358 #if defined( VULKAN_HPP_USE_REFLECT )
4359 # if 14 <= VULKAN_HPP_CPP_VERSION
4360  auto
4361 # else
4362  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &>
4363 # endif
4364  reflect() const VULKAN_HPP_NOEXCEPT
4365  {
4366  return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp );
4367  }
4368 #endif
4369 
4370 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4371  auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default;
4372 #else
4374  {
4375 # if defined( VULKAN_HPP_USE_REFLECT )
4376  return this->reflect() == rhs.reflect();
4377 # else
4378  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) &&
4379  ( swapBufferTimestamp == rhs.swapBufferTimestamp );
4380 # endif
4381  }
4382 
4384  {
4385  return !operator==( rhs );
4386  }
4387 #endif
4388 
4389  public:
4390  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC;
4391  const void * pNext = {};
4392  uint64_t firstDrawTimestamp = {};
4393  uint64_t swapBufferTimestamp = {};
4394  };
4395 
4396  template <>
4398  {
4400  };
4401 
4402  struct ComponentMapping
4403  {
4404  using NativeType = VkComponentMapping;
4405 
4406 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4411  : r( r_ )
4412  , g( g_ )
4413  , b( b_ )
4414  , a( a_ )
4415  {
4416  }
4417 
4419 
4420  ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) ) {}
4421 
4423 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4424 
4426  {
4427  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
4428  return *this;
4429  }
4430 
4431 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
4433  {
4434  r = r_;
4435  return *this;
4436  }
4437 
4439  {
4440  g = g_;
4441  return *this;
4442  }
4443 
4445  {
4446  b = b_;
4447  return *this;
4448  }
4449 
4451  {
4452  a = a_;
4453  return *this;
4454  }
4455 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4456 
4457  operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
4458  {
4459  return *reinterpret_cast<const VkComponentMapping *>( this );
4460  }
4461 
4463  {
4464  return *reinterpret_cast<VkComponentMapping *>( this );
4465  }
4466 
4467 #if defined( VULKAN_HPP_USE_REFLECT )
4468 # if 14 <= VULKAN_HPP_CPP_VERSION
4469  auto
4470 # else
4471  std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
4472  VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
4473  VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
4474  VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
4475 # endif
4476  reflect() const VULKAN_HPP_NOEXCEPT
4477  {
4478  return std::tie( r, g, b, a );
4479  }
4480 #endif
4481 
4482 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4483  auto operator<=>( ComponentMapping const & ) const = default;
4484 #else
4486  {
4487 # if defined( VULKAN_HPP_USE_REFLECT )
4488  return this->reflect() == rhs.reflect();
4489 # else
4490  return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
4491 # endif
4492  }
4493 
4495  {
4496  return !operator==( rhs );
4497  }
4498 #endif
4499 
4500  public:
4501  VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
4502  VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
4503  VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
4504  VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
4505  };
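 // Editor's sketch (not part of the generated header): ComponentMapping remaps the channels an
 // image view exposes. This example swaps the red and blue channels (e.g. to view BGRA data as
 // RGBA) while leaving green and alpha identity-mapped:
 //
 //   VULKAN_HPP_NAMESPACE::ComponentMapping swizzle( VULKAN_HPP_NAMESPACE::ComponentSwizzle::eB,
 //                                                   VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
 //                                                   VULKAN_HPP_NAMESPACE::ComponentSwizzle::eR,
 //                                                   VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity );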
4506 
4507 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
4508  struct AndroidHardwareBufferFormatProperties2ANDROID
4509  {
4511 
4512  static const bool allowDuplicate = false;
4513  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
4514 
4515 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4516  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(
4518  uint64_t externalFormat_ = {},
4519  VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {},
4520  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
4525  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4526  : pNext( pNext_ )
4527  , format( format_ )
4528  , externalFormat( externalFormat_ )
4529  , formatFeatures( formatFeatures_ )
4530  , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
4531  , suggestedYcbcrModel( suggestedYcbcrModel_ )
4532  , suggestedYcbcrRange( suggestedYcbcrRange_ )
4533  , suggestedXChromaOffset( suggestedXChromaOffset_ )
4534  , suggestedYChromaOffset( suggestedYChromaOffset_ )
4535  {
4536  }
4537 
4539  AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4540 
4541  AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4542  : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
4543  {
4544  }
4545 
4546  AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4547 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4548 
4549  AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4550  {
4551  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
4552  return *this;
4553  }
4554 
4556  {
4557  return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
4558  }
4559 
4561  {
4562  return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
4563  }
4564 
4565 # if defined( VULKAN_HPP_USE_REFLECT )
4566 # if 14 <= VULKAN_HPP_CPP_VERSION
4567  auto
4568 # else
4569  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
4570  void * const &,
4572  uint64_t const &,
4579 # endif
4580  reflect() const VULKAN_HPP_NOEXCEPT
4581  {
4582  return std::tie( sType,
4583  pNext,
4584  format,
4585  externalFormat,
4586  formatFeatures,
4587  samplerYcbcrConversionComponents,
4588  suggestedYcbcrModel,
4589  suggestedYcbcrRange,
4590  suggestedXChromaOffset,
4591  suggestedYChromaOffset );
4592  }
4593 # endif
4594 
4595 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4596  auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default;
4597 # else
4598  bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4599  {
4600 # if defined( VULKAN_HPP_USE_REFLECT )
4601  return this->reflect() == rhs.reflect();
4602 # else
4603  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
4604  ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
4605  ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
4606  ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
4607 # endif
4608  }
4609 
4610  bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4611  {
4612  return !operator==( rhs );
4613  }
4614 # endif
4615 
4616  public:
4617  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
4618  void * pNext = {};
4619  VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
4620  uint64_t externalFormat = {};
4621  VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
4622  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
4623  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
4624  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
4625  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
4626  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
4627  };
4628 
4629  template <>
4630  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
4631  {
4632  using Type = AndroidHardwareBufferFormatProperties2ANDROID;
4633  };
4634 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
4635 
4636 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
4637  struct AndroidHardwareBufferFormatPropertiesANDROID
4638  {
4640 
4641  static const bool allowDuplicate = false;
4642  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
4643 
4644 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4645  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(
4647  uint64_t externalFormat_ = {},
4648  VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
4649  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
4654  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4655  : pNext( pNext_ )
4656  , format( format_ )
4657  , externalFormat( externalFormat_ )
4658  , formatFeatures( formatFeatures_ )
4659  , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
4660  , suggestedYcbcrModel( suggestedYcbcrModel_ )
4661  , suggestedYcbcrRange( suggestedYcbcrRange_ )
4662  , suggestedXChromaOffset( suggestedXChromaOffset_ )
4663  , suggestedYChromaOffset( suggestedYChromaOffset_ )
4664  {
4665  }
4666 
4667  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4668 
4669  AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4670  : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
4671  {
4672  }
4673 
4674  AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4675 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4676 
4677  AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4678  {
4679  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
4680  return *this;
4681  }
4682 
4684  {
4685  return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
4686  }
4687 
4689  {
4690  return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
4691  }
4692 
4693 # if defined( VULKAN_HPP_USE_REFLECT )
4694 # if 14 <= VULKAN_HPP_CPP_VERSION
4695  auto
4696 # else
4697  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
4698  void * const &,
4700  uint64_t const &,
4707 # endif
4708  reflect() const VULKAN_HPP_NOEXCEPT
4709  {
4710  return std::tie( sType,
4711  pNext,
4712  format,
4713  externalFormat,
4714  formatFeatures,
4715  samplerYcbcrConversionComponents,
4716  suggestedYcbcrModel,
4717  suggestedYcbcrRange,
4718  suggestedXChromaOffset,
4719  suggestedYChromaOffset );
4720  }
4721 # endif
4722 
4723 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4724  auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
4725 # else
4726  bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4727  {
4728 # if defined( VULKAN_HPP_USE_REFLECT )
4729  return this->reflect() == rhs.reflect();
4730 # else
4731  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
4732  ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
4733  ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
4734  ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
4735 # endif
4736  }
4737 
4738  bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4739  {
4740  return !operator==( rhs );
4741  }
4742 # endif
4743 
4744  public:
4745  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
4746  void * pNext = {};
4747  VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
4748  uint64_t externalFormat = {};
4749  VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
4750  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
4751  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
4752  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
4753  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
4754  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
4755  };
4756 
4757  template <>
4758  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
4759  {
4760  using Type = AndroidHardwareBufferFormatPropertiesANDROID;
4761  };
4762 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
4763 
4764 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
4765  struct AndroidHardwareBufferPropertiesANDROID
4766  {
4767  using NativeType = VkAndroidHardwareBufferPropertiesANDROID;
4768 
4769  static const bool allowDuplicate = false;
4770  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
4771 
4772 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4773  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
4774  uint32_t memoryTypeBits_ = {},
4775  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4776  : pNext( pNext_ )
4777  , allocationSize( allocationSize_ )
4778  , memoryTypeBits( memoryTypeBits_ )
4779  {
4780  }
4781 
4782  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4783 
4784  AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4785  : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
4786  {
4787  }
4788 
4789  AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4790 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4791 
4792  AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4793  {
4794  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
4795  return *this;
4796  }
4797 
4799  {
4800  return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID *>( this );
4801  }
4802 
4804  {
4805  return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( this );
4806  }
4807 
4808 # if defined( VULKAN_HPP_USE_REFLECT )
4809 # if 14 <= VULKAN_HPP_CPP_VERSION
4810  auto
4811 # else
4812  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
4813 # endif
4814  reflect() const VULKAN_HPP_NOEXCEPT
4815  {
4816  return std::tie( sType, pNext, allocationSize, memoryTypeBits );
4817  }
4818 # endif
4819 
4820 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4821  auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
4822 # else
4823  bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4824  {
4825 # if defined( VULKAN_HPP_USE_REFLECT )
4826  return this->reflect() == rhs.reflect();
4827 # else
4828  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
4829 # endif
4830  }
4831 
4832  bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4833  {
4834  return !operator==( rhs );
4835  }
4836 # endif
4837 
4838  public:
4839  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
4840  void * pNext = {};
4841  VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
4842  uint32_t memoryTypeBits = {};
4843  };
4844 
4845  template <>
4846  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
4847  {
4848  using Type = AndroidHardwareBufferPropertiesANDROID;
4849  };
4850 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
4851 
4852 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
4853  struct AndroidHardwareBufferUsageANDROID
4854  {
4855  using NativeType = VkAndroidHardwareBufferUsageANDROID;
4856 
4857  static const bool allowDuplicate = false;
4858  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
4859 
4860 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4861  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4862  : pNext( pNext_ )
4863  , androidHardwareBufferUsage( androidHardwareBufferUsage_ )
4864  {
4865  }
4866 
4867  VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4868 
4869  AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4870  : AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
4871  {
4872  }
4873 
4874  AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4875 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4876 
4877  AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
4878  {
4879  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
4880  return *this;
4881  }
4882 
4884  {
4885  return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>( this );
4886  }
4887 
4889  {
4890  return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>( this );
4891  }
4892 
4893 # if defined( VULKAN_HPP_USE_REFLECT )
4894 # if 14 <= VULKAN_HPP_CPP_VERSION
4895  auto
4896 # else
4897  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
4898 # endif
4899  reflect() const VULKAN_HPP_NOEXCEPT
4900  {
4901  return std::tie( sType, pNext, androidHardwareBufferUsage );
4902  }
4903 # endif
4904 
4905 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
4906  auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
4907 # else
4908  bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4909  {
4910 # if defined( VULKAN_HPP_USE_REFLECT )
4911  return this->reflect() == rhs.reflect();
4912 # else
4913  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
4914 # endif
4915  }
4916 
4917  bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
4918  {
4919  return !operator==( rhs );
4920  }
4921 # endif
4922 
4923  public:
4924  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
4925  void * pNext = {};
4926  uint64_t androidHardwareBufferUsage = {};
4927  };
4928 
4929  template <>
4930  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
4931  {
4932  using Type = AndroidHardwareBufferUsageANDROID;
4933  };
4934 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
4935 
4936 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
4937  struct AndroidSurfaceCreateInfoKHR
4938  {
4939  using NativeType = VkAndroidSurfaceCreateInfoKHR;
4940 
4941  static const bool allowDuplicate = false;
4942  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
4943 
4944 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
4945  VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
4946  struct ANativeWindow * window_ = {},
4947  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
4948  : pNext( pNext_ )
4949  , flags( flags_ )
4950  , window( window_ )
4951  {
4952  }
4953 
4954  VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4955 
4956  AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
4957  : AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
4958  {
4959  }
4960 
4961  AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
4962 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
4963 
4964  AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
4965  {
4966  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
4967  return *this;
4968  }
4969 
4970 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
4971  VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
4972  {
4973  pNext = pNext_;
4974  return *this;
4975  }
4976 
4977  VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
4978  {
4979  flags = flags_;
4980  return *this;
4981  }
4982 
4983  VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
4984  {
4985  window = window_;
4986  return *this;
4987  }
4988 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
4989 
4990  operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
4991  {
4992  return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( this );
4993  }
4994 
4996  {
4997  return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>( this );
4998  }
4999 
5000 # if defined( VULKAN_HPP_USE_REFLECT )
5001 # if 14 <= VULKAN_HPP_CPP_VERSION
5002  auto
5003 # else
5004  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
5005  const void * const &,
5006  VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR const &,
5007  struct ANativeWindow * const &>
5008 # endif
5009  reflect() const VULKAN_HPP_NOEXCEPT
5010  {
5011  return std::tie( sType, pNext, flags, window );
5012  }
5013 # endif
5014 
5015 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5016  auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
5017 # else
5018  bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
5019  {
5020 # if defined( VULKAN_HPP_USE_REFLECT )
5021  return this->reflect() == rhs.reflect();
5022 # else
5023  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
5024 # endif
5025  }
5026 
5027  bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
5028  {
5029  return !operator==( rhs );
5030  }
5031 # endif
5032 
5033  public:
5034  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
5035  const void * pNext = {};
5036  VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
5037  struct ANativeWindow * window = {};
5038  };
5039 
5040  template <>
5041  struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
5042  {
5043  using Type = AndroidSurfaceCreateInfoKHR;
5044  };
5045 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
5046 
5047  struct ApplicationInfo
5048  {
5049  using NativeType = VkApplicationInfo;
5050 
5051  static const bool allowDuplicate = false;
5052  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
5053 
5054 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5055  VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {},
5056  uint32_t applicationVersion_ = {},
5057  const char * pEngineName_ = {},
5058  uint32_t engineVersion_ = {},
5059  uint32_t apiVersion_ = {},
5060  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5061  : pNext( pNext_ )
5062  , pApplicationName( pApplicationName_ )
5063  , applicationVersion( applicationVersion_ )
5064  , pEngineName( pEngineName_ )
5065  , engineVersion( engineVersion_ )
5066  , apiVersion( apiVersion_ )
5067  {
5068  }
5069 
5071 
5072  ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) ) {}
5073 
5074  ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
5075 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5076 
5078  {
5079  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
5080  return *this;
5081  }
5082 
5083 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5085  {
5086  pNext = pNext_;
5087  return *this;
5088  }
5089 
5091  {
5092  pApplicationName = pApplicationName_;
5093  return *this;
5094  }
5095 
5097  {
5098  applicationVersion = applicationVersion_;
5099  return *this;
5100  }
5101 
5103  {
5104  pEngineName = pEngineName_;
5105  return *this;
5106  }
5107 
5109  {
5110  engineVersion = engineVersion_;
5111  return *this;
5112  }
5113 
5115  {
5116  apiVersion = apiVersion_;
5117  return *this;
5118  }
5119 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5120 
5121  operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
5122  {
5123  return *reinterpret_cast<const VkApplicationInfo *>( this );
5124  }
5125 
5127  {
5128  return *reinterpret_cast<VkApplicationInfo *>( this );
5129  }
5130 
5131 #if defined( VULKAN_HPP_USE_REFLECT )
5132 # if 14 <= VULKAN_HPP_CPP_VERSION
5133  auto
5134 # else
5135  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
5136  const void * const &,
5137  const char * const &,
5138  uint32_t const &,
5139  const char * const &,
5140  uint32_t const &,
5141  uint32_t const &>
5142 # endif
5143  reflect() const VULKAN_HPP_NOEXCEPT
5144  {
5145  return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
5146  }
5147 #endif
5148 
5149 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5150  std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
5151  {
5152  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
5153  return cmp;
5154  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
5155  return cmp;
5156  if ( pApplicationName != rhs.pApplicationName )
5157  if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
5158  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
5159  if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 )
5160  return cmp;
5161  if ( pEngineName != rhs.pEngineName )
5162  if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
5163  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
5164  if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 )
5165  return cmp;
5166  if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
5167  return cmp;
5168 
5169  return std::strong_ordering::equivalent;
5170  }
5171 #endif
5172 
5173  bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
5174  {
5175  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
5176  ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
5177  ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
5178  ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
5179  }
5180 
5181  bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
5182  {
5183  return !operator==( rhs );
5184  }
5185 
5186  public:
5187  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
5188  const void * pNext = {};
5189  const char * pApplicationName = {};
5190  uint32_t applicationVersion = {};
5191  const char * pEngineName = {};
5192  uint32_t engineVersion = {};
5193  uint32_t apiVersion = {};
5194  };
5195 
5196  template <>
5197  struct CppType<StructureType, StructureType::eApplicationInfo>
5198  {
5199  using Type = ApplicationInfo;
5200  };
5201 
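 // Usage sketch (editorial note, not part of the generated header): how the fluent
 // setters above are typically combined when creating an instance.  The application
 // and engine names and the API version below are placeholders.
 //
 //   #include <vulkan/vulkan.hpp>
 //
 //   vk::ApplicationInfo appInfo = vk::ApplicationInfo{}
 //                                   .setPApplicationName( "ExampleApp" )
 //                                   .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
 //                                   .setPEngineName( "ExampleEngine" )
 //                                   .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
 //                                   .setApiVersion( VK_API_VERSION_1_2 );
 //   vk::InstanceCreateInfo instanceCreateInfo{ {}, &appInfo };
 //   vk::Instance instance = vk::createInstance( instanceCreateInfo );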
5202  struct AttachmentDescription
5203  {
5204  using NativeType = VkAttachmentDescription;
5205 
5206 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5207  VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
5208  VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
5209  VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
5210  VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
5211  VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
5212  VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
5213  VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
5214  VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
5215  VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
5217  : flags( flags_ )
5218  , format( format_ )
5219  , samples( samples_ )
5220  , loadOp( loadOp_ )
5221  , storeOp( storeOp_ )
5222  , stencilLoadOp( stencilLoadOp_ )
5223  , stencilStoreOp( stencilStoreOp_ )
5224  , initialLayout( initialLayout_ )
5225  , finalLayout( finalLayout_ )
5226  {
5227  }
5228 
5230 
5232  : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
5233  {
5234  }
5235 
5237 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5238 
5240  {
5241  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
5242  return *this;
5243  }
5244 
5245 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5247  {
5248  flags = flags_;
5249  return *this;
5250  }
5251 
5253  {
5254  format = format_;
5255  return *this;
5256  }
5257 
5259  {
5260  samples = samples_;
5261  return *this;
5262  }
5263 
5265  {
5266  loadOp = loadOp_;
5267  return *this;
5268  }
5269 
5271  {
5272  storeOp = storeOp_;
5273  return *this;
5274  }
5275 
5277  {
5278  stencilLoadOp = stencilLoadOp_;
5279  return *this;
5280  }
5281 
5283  {
5284  stencilStoreOp = stencilStoreOp_;
5285  return *this;
5286  }
5287 
5289  {
5290  initialLayout = initialLayout_;
5291  return *this;
5292  }
5293 
5295  {
5296  finalLayout = finalLayout_;
5297  return *this;
5298  }
5299 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5300 
5302  {
5303  return *reinterpret_cast<const VkAttachmentDescription *>( this );
5304  }
5305 
5307  {
5308  return *reinterpret_cast<VkAttachmentDescription *>( this );
5309  }
5310 
5311 #if defined( VULKAN_HPP_USE_REFLECT )
5312 # if 14 <= VULKAN_HPP_CPP_VERSION
5313  auto
5314 # else
5324 # endif
5325  reflect() const VULKAN_HPP_NOEXCEPT
5326  {
5328  }
5329 #endif
5330 
5331 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5332  auto operator<=>( AttachmentDescription const & ) const = default;
5333 #else
5335  {
5336 # if defined( VULKAN_HPP_USE_REFLECT )
5337  return this->reflect() == rhs.reflect();
5338 # else
5339  return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
5340  ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
5341  ( finalLayout == rhs.finalLayout );
5342 # endif
5343  }
5344 
5346  {
5347  return !operator==( rhs );
5348  }
5349 #endif
5350 
5351  public:
5361  };
5362 
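 // Usage sketch (editorial note, not part of the generated header): filling an
 // AttachmentDescription for a presentable color attachment; the format is a
 // placeholder and would normally come from the swapchain.
 //
 //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription{}
 //                                                 .setFormat( vk::Format::eB8G8R8A8Unorm )
 //                                                 .setSamples( vk::SampleCountFlagBits::e1 )
 //                                                 .setLoadOp( vk::AttachmentLoadOp::eClear )
 //                                                 .setStoreOp( vk::AttachmentStoreOp::eStore )
 //                                                 .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
 //                                                 .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
 //                                                 .setInitialLayout( vk::ImageLayout::eUndefined )
 //                                                 .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );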
5363  struct AttachmentDescription2
5364  {
5365  using NativeType = VkAttachmentDescription2;
5366 
5367  static const bool allowDuplicate = false;
5368  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
5369 
5370 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5380  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5381  : pNext( pNext_ )
5382  , flags( flags_ )
5383  , format( format_ )
5384  , samples( samples_ )
5385  , loadOp( loadOp_ )
5386  , storeOp( storeOp_ )
5387  , stencilLoadOp( stencilLoadOp_ )
5388  , stencilStoreOp( stencilStoreOp_ )
5389  , initialLayout( initialLayout_ )
5390  , finalLayout( finalLayout_ )
5391  {
5392  }
5393 
5395 
5397  : AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
5398  {
5399  }
5400 
5402 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5403 
5405  {
5406  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
5407  return *this;
5408  }
5409 
5410 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5412  {
5413  pNext = pNext_;
5414  return *this;
5415  }
5416 
5418  {
5419  flags = flags_;
5420  return *this;
5421  }
5422 
5424  {
5425  format = format_;
5426  return *this;
5427  }
5428 
5430  {
5431  samples = samples_;
5432  return *this;
5433  }
5434 
5436  {
5437  loadOp = loadOp_;
5438  return *this;
5439  }
5440 
5442  {
5443  storeOp = storeOp_;
5444  return *this;
5445  }
5446 
5448  {
5449  stencilLoadOp = stencilLoadOp_;
5450  return *this;
5451  }
5452 
5454  {
5455  stencilStoreOp = stencilStoreOp_;
5456  return *this;
5457  }
5458 
5460  {
5461  initialLayout = initialLayout_;
5462  return *this;
5463  }
5464 
5466  {
5467  finalLayout = finalLayout_;
5468  return *this;
5469  }
5470 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5471 
5473  {
5474  return *reinterpret_cast<const VkAttachmentDescription2 *>( this );
5475  }
5476 
5478  {
5479  return *reinterpret_cast<VkAttachmentDescription2 *>( this );
5480  }
5481 
5482 #if defined( VULKAN_HPP_USE_REFLECT )
5483 # if 14 <= VULKAN_HPP_CPP_VERSION
5484  auto
5485 # else
5486  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
5487  const void * const &,
5497 # endif
5498  reflect() const VULKAN_HPP_NOEXCEPT
5499  {
5501  }
5502 #endif
5503 
5504 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5505  auto operator<=>( AttachmentDescription2 const & ) const = default;
5506 #else
5508  {
5509 # if defined( VULKAN_HPP_USE_REFLECT )
5510  return this->reflect() == rhs.reflect();
5511 # else
5512  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
5513  ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
5514  ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout );
5515 # endif
5516  }
5517 
5519  {
5520  return !operator==( rhs );
5521  }
5522 #endif
5523 
5524  public:
5526  const void * pNext = {};
5536  };
5537 
5538  template <>
5540  {
5542  };
5544 
5545  struct AttachmentDescriptionStencilLayout
5546  {
5547  using NativeType = VkAttachmentDescriptionStencilLayout;
5548 
5549  static const bool allowDuplicate = false;
5550  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
5551 
5552 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5556  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5557  : pNext( pNext_ )
5558  , stencilInitialLayout( stencilInitialLayout_ )
5559  , stencilFinalLayout( stencilFinalLayout_ )
5560  {
5561  }
5562 
5564 
5566  : AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
5567  {
5568  }
5569 
5571 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5572 
5574  {
5575  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
5576  return *this;
5577  }
5578 
5579 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5581  {
5582  pNext = pNext_;
5583  return *this;
5584  }
5585 
5588  {
5589  stencilInitialLayout = stencilInitialLayout_;
5590  return *this;
5591  }
5592 
5595  {
5596  stencilFinalLayout = stencilFinalLayout_;
5597  return *this;
5598  }
5599 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5600 
5602  {
5603  return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
5604  }
5605 
5607  {
5608  return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
5609  }
5610 
5611 #if defined( VULKAN_HPP_USE_REFLECT )
5612 # if 14 <= VULKAN_HPP_CPP_VERSION
5613  auto
5614 # else
5615  std::
5616  tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
5617 # endif
5618  reflect() const VULKAN_HPP_NOEXCEPT
5619  {
5620  return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
5621  }
5622 #endif
5623 
5624 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5625  auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
5626 #else
5628  {
5629 # if defined( VULKAN_HPP_USE_REFLECT )
5630  return this->reflect() == rhs.reflect();
5631 # else
5632  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) &&
5633  ( stencilFinalLayout == rhs.stencilFinalLayout );
5634 # endif
5635  }
5636 
5638  {
5639  return !operator==( rhs );
5640  }
5641 #endif
5642 
5643  public:
5645  void * pNext = {};
5648  };
5649 
5650  template <>
5652  {
5654  };
5656 
5657  struct AttachmentReference
5658  {
5659  using NativeType = VkAttachmentReference;
5660 
5661 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5662  VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
5664  : attachment( attachment_ )
5665  , layout( layout_ )
5666  {
5667  }
5668 
5670 
5671  AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
5672  {
5673  }
5674 
5676 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5677 
5679  {
5680  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
5681  return *this;
5682  }
5683 
5684 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5686  {
5687  attachment = attachment_;
5688  return *this;
5689  }
5690 
5692  {
5693  layout = layout_;
5694  return *this;
5695  }
5696 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5697 
5699  {
5700  return *reinterpret_cast<const VkAttachmentReference *>( this );
5701  }
5702 
5704  {
5705  return *reinterpret_cast<VkAttachmentReference *>( this );
5706  }
5707 
5708 #if defined( VULKAN_HPP_USE_REFLECT )
5709 # if 14 <= VULKAN_HPP_CPP_VERSION
5710  auto
5711 # else
5712  std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
5713 # endif
5714  reflect() const VULKAN_HPP_NOEXCEPT
5715  {
5716  return std::tie( attachment, layout );
5717  }
5718 #endif
5719 
5720 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5721  auto operator<=>( AttachmentReference const & ) const = default;
5722 #else
5724  {
5725 # if defined( VULKAN_HPP_USE_REFLECT )
5726  return this->reflect() == rhs.reflect();
5727 # else
5728  return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
5729 # endif
5730  }
5731 
5733  {
5734  return !operator==( rhs );
5735  }
5736 #endif
5737 
5738  public:
5739  uint32_t attachment = {};
5741  };
5742 
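 // Usage sketch (editorial note, not part of the generated header): an
 // AttachmentReference pointing at attachment 0 of a render pass, wired into a
 // SubpassDescription (declared elsewhere in this header).
 //
 //   vk::AttachmentReference colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };
 //   vk::SubpassDescription subpass = vk::SubpassDescription{}
 //                                      .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
 //                                      .setColorAttachmentCount( 1 )
 //                                      .setPColorAttachments( &colorRef );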
5743  struct AttachmentReference2
5744  {
5745  using NativeType = VkAttachmentReference2;
5746 
5747  static const bool allowDuplicate = false;
5748  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
5749 
5750 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5751  VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
5753  VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
5754  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5755  : pNext( pNext_ )
5756  , attachment( attachment_ )
5757  , layout( layout_ )
5758  , aspectMask( aspectMask_ )
5759  {
5760  }
5761 
5763 
5765  : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
5766  {
5767  }
5768 
5770 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5771 
5773  {
5774  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
5775  return *this;
5776  }
5777 
5778 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5780  {
5781  pNext = pNext_;
5782  return *this;
5783  }
5784 
5786  {
5787  attachment = attachment_;
5788  return *this;
5789  }
5790 
5792  {
5793  layout = layout_;
5794  return *this;
5795  }
5796 
5798  {
5799  aspectMask = aspectMask_;
5800  return *this;
5801  }
5802 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5803 
5805  {
5806  return *reinterpret_cast<const VkAttachmentReference2 *>( this );
5807  }
5808 
5810  {
5811  return *reinterpret_cast<VkAttachmentReference2 *>( this );
5812  }
5813 
5814 #if defined( VULKAN_HPP_USE_REFLECT )
5815 # if 14 <= VULKAN_HPP_CPP_VERSION
5816  auto
5817 # else
5818  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
5819  const void * const &,
5820  uint32_t const &,
5823 # endif
5824  reflect() const VULKAN_HPP_NOEXCEPT
5825  {
5826  return std::tie( sType, pNext, attachment, layout, aspectMask );
5827  }
5828 #endif
5829 
5830 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5831  auto operator<=>( AttachmentReference2 const & ) const = default;
5832 #else
5834  {
5835 # if defined( VULKAN_HPP_USE_REFLECT )
5836  return this->reflect() == rhs.reflect();
5837 # else
5838  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) &&
5839  ( aspectMask == rhs.aspectMask );
5840 # endif
5841  }
5842 
5844  {
5845  return !operator==( rhs );
5846  }
5847 #endif
5848 
5849  public:
5851  const void * pNext = {};
5852  uint32_t attachment = {};
5855  };
5856 
5857  template <>
5859  {
5861  };
5863 
5865  {
5867 
5868  static const bool allowDuplicate = false;
5870 
5871 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5873  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5874  : pNext( pNext_ )
5875  , stencilLayout( stencilLayout_ )
5876  {
5877  }
5878 
5880 
5882  : AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
5883  {
5884  }
5885 
5887 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
5888 
5890  {
5891  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
5892  return *this;
5893  }
5894 
5895 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
5897  {
5898  pNext = pNext_;
5899  return *this;
5900  }
5901 
5903  {
5904  stencilLayout = stencilLayout_;
5905  return *this;
5906  }
5907 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
5908 
5910  {
5911  return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
5912  }
5913 
5915  {
5916  return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
5917  }
5918 
5919 #if defined( VULKAN_HPP_USE_REFLECT )
5920 # if 14 <= VULKAN_HPP_CPP_VERSION
5921  auto
5922 # else
5923  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
5924 # endif
5925  reflect() const VULKAN_HPP_NOEXCEPT
5926  {
5927  return std::tie( sType, pNext, stencilLayout );
5928  }
5929 #endif
5930 
5931 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
5932  auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
5933 #else
5935  {
5936 # if defined( VULKAN_HPP_USE_REFLECT )
5937  return this->reflect() == rhs.reflect();
5938 # else
5939  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
5940 # endif
5941  }
5942 
5944  {
5945  return !operator==( rhs );
5946  }
5947 #endif
5948 
5949  public:
5951  void * pNext = {};
5953  };
5954 
5955  template <>
5957  {
5959  };
5961 
5962  struct AttachmentSampleCountInfoAMD
5963  {
5964  using NativeType = VkAttachmentSampleCountInfoAMD;
5965 
5966  static const bool allowDuplicate = false;
5967  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD;
5968 
5969 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
5971  AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {},
5972  const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {},
5974  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
5975  : pNext( pNext_ )
5976  , colorAttachmentCount( colorAttachmentCount_ )
5977  , pColorAttachmentSamples( pColorAttachmentSamples_ )
5978  , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
5979  {
5980  }
5981 
5983 
5985  : AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
5986  {
5987  }
5988 
5989 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
5993  const void * pNext_ = nullptr )
5994  : pNext( pNext_ )
5995  , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) )
5996  , pColorAttachmentSamples( colorAttachmentSamples_.data() )
5997  , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
5998  {
5999  }
6000 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6001 
6003 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6004 
6006  {
6007  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const *>( &rhs );
6008  return *this;
6009  }
6010 
6011 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6013  {
6014  pNext = pNext_;
6015  return *this;
6016  }
6017 
6019  {
6020  colorAttachmentCount = colorAttachmentCount_;
6021  return *this;
6022  }
6023 
6026  {
6027  pColorAttachmentSamples = pColorAttachmentSamples_;
6028  return *this;
6029  }
6030 
6031 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6034  {
6035  colorAttachmentCount = static_cast<uint32_t>( colorAttachmentSamples_.size() );
6036  pColorAttachmentSamples = colorAttachmentSamples_.data();
6037  return *this;
6038  }
6039 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6040 
6043  {
6044  depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
6045  return *this;
6046  }
6047 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6048 
6050  {
6051  return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD *>( this );
6052  }
6053 
6055  {
6056  return *reinterpret_cast<VkAttachmentSampleCountInfoAMD *>( this );
6057  }
6058 
6059 #if defined( VULKAN_HPP_USE_REFLECT )
6060 # if 14 <= VULKAN_HPP_CPP_VERSION
6061  auto
6062 # else
6063  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
6064  const void * const &,
6065  uint32_t const &,
6068 # endif
6069  reflect() const VULKAN_HPP_NOEXCEPT
6070  {
6072  }
6073 #endif
6074 
6075 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6076  auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default;
6077 #else
6079  {
6080 # if defined( VULKAN_HPP_USE_REFLECT )
6081  return this->reflect() == rhs.reflect();
6082 # else
6083  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
6084  ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
6085 # endif
6086  }
6087 
6089  {
6090  return !operator==( rhs );
6091  }
6092 #endif
6093 
6094  public:
6096  const void * pNext = {};
6097  uint32_t colorAttachmentCount = {};
6100  };
6101 
6102  template <>
6104  {
6106  };
6108 
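 // Usage sketch (editorial note, not part of the generated header): the ArrayProxy
 // constructor above derives colorAttachmentCount from the container size; the
 // sample counts below are placeholders.
 //
 //   #include <array>
 //
 //   std::array<vk::SampleCountFlagBits, 2> colorSamples = { vk::SampleCountFlagBits::e4,
 //                                                           vk::SampleCountFlagBits::e4 };
 //   vk::AttachmentSampleCountInfoAMD sampleCountInfo{ colorSamples, vk::SampleCountFlagBits::e4 };
 //   // sampleCountInfo.colorAttachmentCount == 2, pColorAttachmentSamples == colorSamples.data()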
6109  struct Extent2D
6110  {
6111  using NativeType = VkExtent2D;
6112 
6113 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6114  VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
6115  : width( width_ )
6116  , height( height_ )
6117  {
6118  }
6119 
6120  VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
6121 
6122  Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
6123 
6124  Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
6125 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6126 
6128  {
6129  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
6130  return *this;
6131  }
6132 
6133 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6135  {
6136  width = width_;
6137  return *this;
6138  }
6139 
6141  {
6142  height = height_;
6143  return *this;
6144  }
6145 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6146 
6147  operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
6148  {
6149  return *reinterpret_cast<const VkExtent2D *>( this );
6150  }
6151 
6153  {
6154  return *reinterpret_cast<VkExtent2D *>( this );
6155  }
6156 
6157 #if defined( VULKAN_HPP_USE_REFLECT )
6158 # if 14 <= VULKAN_HPP_CPP_VERSION
6159  auto
6160 # else
6161  std::tuple<uint32_t const &, uint32_t const &>
6162 # endif
6163  reflect() const VULKAN_HPP_NOEXCEPT
6164  {
6165  return std::tie( width, height );
6166  }
6167 #endif
6168 
6169 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6170  auto operator<=>( Extent2D const & ) const = default;
6171 #else
6172  bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
6173  {
6174 # if defined( VULKAN_HPP_USE_REFLECT )
6175  return this->reflect() == rhs.reflect();
6176 # else
6177  return ( width == rhs.width ) && ( height == rhs.height );
6178 # endif
6179  }
6180 
6181  bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
6182  {
6183  return !operator==( rhs );
6184  }
6185 #endif
6186 
6187  public:
6188  uint32_t width = {};
6189  uint32_t height = {};
6190  };
6191 
6192  struct SampleLocationEXT
6193  {
6194  using NativeType = VkSampleLocationEXT;
6195 
6196 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6198  : x( x_ )
6199  , y( y_ )
6200  {
6201  }
6202 
6204 
6205  SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) {}
6206 
6208 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6209 
6211  {
6212  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
6213  return *this;
6214  }
6215 
6216 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6218  {
6219  x = x_;
6220  return *this;
6221  }
6222 
6224  {
6225  y = y_;
6226  return *this;
6227  }
6228 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6229 
6230  operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
6231  {
6232  return *reinterpret_cast<const VkSampleLocationEXT *>( this );
6233  }
6234 
6236  {
6237  return *reinterpret_cast<VkSampleLocationEXT *>( this );
6238  }
6239 
6240 #if defined( VULKAN_HPP_USE_REFLECT )
6241 # if 14 <= VULKAN_HPP_CPP_VERSION
6242  auto
6243 # else
6244  std::tuple<float const &, float const &>
6245 # endif
6246  reflect() const VULKAN_HPP_NOEXCEPT
6247  {
6248  return std::tie( x, y );
6249  }
6250 #endif
6251 
6252 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6253  auto operator<=>( SampleLocationEXT const & ) const = default;
6254 #else
6256  {
6257 # if defined( VULKAN_HPP_USE_REFLECT )
6258  return this->reflect() == rhs.reflect();
6259 # else
6260  return ( x == rhs.x ) && ( y == rhs.y );
6261 # endif
6262  }
6263 
6265  {
6266  return !operator==( rhs );
6267  }
6268 #endif
6269 
6270  public:
6271  float x = {};
6272  float y = {};
6273  };
6274 
6275  struct SampleLocationsInfoEXT
6276  {
6277  using NativeType = VkSampleLocationsInfoEXT;
6278 
6279  static const bool allowDuplicate = false;
6280  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
6281 
6282 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6285  VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
6286  uint32_t sampleLocationsCount_ = {},
6287  const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {},
6288  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
6289  : pNext( pNext_ )
6290  , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
6291  , sampleLocationGridSize( sampleLocationGridSize_ )
6292  , sampleLocationsCount( sampleLocationsCount_ )
6293  , pSampleLocations( pSampleLocations_ )
6294  {
6295  }
6296 
6298 
6300  : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
6301  {
6302  }
6303 
6304 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6306  VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_,
6308  const void * pNext_ = nullptr )
6309  : pNext( pNext_ )
6310  , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
6311  , sampleLocationGridSize( sampleLocationGridSize_ )
6312  , sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) )
6313  , pSampleLocations( sampleLocations_.data() )
6314  {
6315  }
6316 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6317 
6319 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6320 
6322  {
6323  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
6324  return *this;
6325  }
6326 
6327 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6329  {
6330  pNext = pNext_;
6331  return *this;
6332  }
6333 
6336  {
6337  sampleLocationsPerPixel = sampleLocationsPerPixel_;
6338  return *this;
6339  }
6340 
6343  {
6344  sampleLocationGridSize = sampleLocationGridSize_;
6345  return *this;
6346  }
6347 
6349  {
6350  sampleLocationsCount = sampleLocationsCount_;
6351  return *this;
6352  }
6353 
6356  {
6357  pSampleLocations = pSampleLocations_;
6358  return *this;
6359  }
6360 
6361 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6364  {
6365  sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
6366  pSampleLocations = sampleLocations_.data();
6367  return *this;
6368  }
6369 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6370 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6371 
6373  {
6374  return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
6375  }
6376 
6378  {
6379  return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
6380  }
6381 
6382 #if defined( VULKAN_HPP_USE_REFLECT )
6383 # if 14 <= VULKAN_HPP_CPP_VERSION
6384  auto
6385 # else
6386  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
6387  const void * const &,
6390  uint32_t const &,
6392 # endif
6393  reflect() const VULKAN_HPP_NOEXCEPT
6394  {
6396  }
6397 #endif
6398 
6399 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6400  auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
6401 #else
6403  {
6404 # if defined( VULKAN_HPP_USE_REFLECT )
6405  return this->reflect() == rhs.reflect();
6406 # else
6407  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
6408  ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) &&
6409  ( pSampleLocations == rhs.pSampleLocations );
6410 # endif
6411  }
6412 
6414  {
6415  return !operator==( rhs );
6416  }
6417 #endif
6418 
6419  public:
6421  const void * pNext = {};
6424  uint32_t sampleLocationsCount = {};
6426  };
6427 
6428  template <>
6430  {
6432  };
6433 
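 // Usage sketch (editorial note, not part of the generated header): the ArrayProxy
 // constructor above fills sampleLocationsCount/pSampleLocations from the container;
 // the 2x2 sample pattern below is a placeholder.
 //
 //   #include <array>
 //
 //   std::array<vk::SampleLocationEXT, 4> locations = { vk::SampleLocationEXT{ 0.25f, 0.25f },
 //                                                      vk::SampleLocationEXT{ 0.75f, 0.25f },
 //                                                      vk::SampleLocationEXT{ 0.25f, 0.75f },
 //                                                      vk::SampleLocationEXT{ 0.75f, 0.75f } };
 //   vk::SampleLocationsInfoEXT sampleLocations{ vk::SampleCountFlagBits::e4, vk::Extent2D{ 1, 1 }, locations };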
6435  {
6437 
6438 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6439  VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
6441  : attachmentIndex( attachmentIndex_ )
6442  , sampleLocationsInfo( sampleLocationsInfo_ )
6443  {
6444  }
6445 
6447 
6449  : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
6450  {
6451  }
6452 
6454 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6455 
6457  {
6458  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
6459  return *this;
6460  }
6461 
6462 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6464  {
6465  attachmentIndex = attachmentIndex_;
6466  return *this;
6467  }
6468 
6471  {
6472  sampleLocationsInfo = sampleLocationsInfo_;
6473  return *this;
6474  }
6475 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6476 
6478  {
6479  return *reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
6480  }
6481 
6483  {
6484  return *reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
6485  }
6486 
6487 #if defined( VULKAN_HPP_USE_REFLECT )
6488 # if 14 <= VULKAN_HPP_CPP_VERSION
6489  auto
6490 # else
6491  std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
6492 # endif
6493  reflect() const VULKAN_HPP_NOEXCEPT
6494  {
6495  return std::tie( attachmentIndex, sampleLocationsInfo );
6496  }
6497 #endif
6498 
6499 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6500  auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
6501 #else
6503  {
6504 # if defined( VULKAN_HPP_USE_REFLECT )
6505  return this->reflect() == rhs.reflect();
6506 # else
6507  return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
6508 # endif
6509  }
6510 
6512  {
6513  return !operator==( rhs );
6514  }
6515 #endif
6516 
6517  public:
6518  uint32_t attachmentIndex = {};
6520  };
6521 
6522  struct BaseInStructure
6523  {
6524  using NativeType = VkBaseInStructure;
6525 
6526 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6528  const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
6529  : sType( sType_ )
6530  , pNext( pNext_ )
6531  {
6532  }
6533 
6534  BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
6535 
6536  BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) ) {}
6537 
6538  BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
6539 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6540 
6542  {
6543  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
6544  return *this;
6545  }
6546 
6547 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6549  {
6550  pNext = pNext_;
6551  return *this;
6552  }
6553 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6554 
6555  operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
6556  {
6557  return *reinterpret_cast<const VkBaseInStructure *>( this );
6558  }
6559 
6561  {
6562  return *reinterpret_cast<VkBaseInStructure *>( this );
6563  }
6564 
6565 #if defined( VULKAN_HPP_USE_REFLECT )
6566 # if 14 <= VULKAN_HPP_CPP_VERSION
6567  auto
6568 # else
6569  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
6570 # endif
6571  reflect() const VULKAN_HPP_NOEXCEPT
6572  {
6573  return std::tie( sType, pNext );
6574  }
6575 #endif
6576 
6577 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6578  auto operator<=>( BaseInStructure const & ) const = default;
6579 #else
6581  {
6582 # if defined( VULKAN_HPP_USE_REFLECT )
6583  return this->reflect() == rhs.reflect();
6584 # else
6585  return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
6586 # endif
6587  }
6588 
6590  {
6591  return !operator==( rhs );
6592  }
6593 #endif
6594 
6595  public:
6598  };
6599 
6600  struct BaseOutStructure
6601  {
6602  using NativeType = VkBaseOutStructure;
6603 
6604 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6607  : sType( sType_ )
6608  , pNext( pNext_ )
6609  {
6610  }
6611 
6612  BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
6613 
6614  BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) ) {}
6615 
6617 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6618 
6620  {
6621  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
6622  return *this;
6623  }
6624 
6625 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6627  {
6628  pNext = pNext_;
6629  return *this;
6630  }
6631 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6632 
6633  operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
6634  {
6635  return *reinterpret_cast<const VkBaseOutStructure *>( this );
6636  }
6637 
6639  {
6640  return *reinterpret_cast<VkBaseOutStructure *>( this );
6641  }
6642 
6643 #if defined( VULKAN_HPP_USE_REFLECT )
6644 # if 14 <= VULKAN_HPP_CPP_VERSION
6645  auto
6646 # else
6647  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
6648 # endif
6649  reflect() const VULKAN_HPP_NOEXCEPT
6650  {
6651  return std::tie( sType, pNext );
6652  }
6653 #endif
6654 
6655 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6656  auto operator<=>( BaseOutStructure const & ) const = default;
6657 #else
6659  {
6660 # if defined( VULKAN_HPP_USE_REFLECT )
6661  return this->reflect() == rhs.reflect();
6662 # else
6663  return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
6664 # endif
6665  }
6666 
6668  {
6669  return !operator==( rhs );
6670  }
6671 #endif
6672 
6673  public:
6676  };
6677 
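 // Usage sketch (editorial note, not part of the generated header): BaseInStructure
 // lets code walk an arbitrary sType/pNext chain generically.  `someCreateInfo` is
 // assumed to be any Vulkan structure that begins with sType/pNext.
 //
 //   auto node = reinterpret_cast<const vk::BaseInStructure *>( &someCreateInfo );
 //   while ( node != nullptr )
 //   {
 //     // dispatch on node->sType here
 //     node = node->pNext;
 //   }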
6679  {
6681 
6682  static const bool allowDuplicate = false;
6684 
6685 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6688  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
6689  uint32_t deviceIndexCount_ = {},
6690  const uint32_t * pDeviceIndices_ = {},
6691  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
6692  : pNext( pNext_ )
6693  , accelerationStructure( accelerationStructure_ )
6694  , memory( memory_ )
6695  , memoryOffset( memoryOffset_ )
6696  , deviceIndexCount( deviceIndexCount_ )
6697  , pDeviceIndices( pDeviceIndices_ )
6698  {
6699  }
6700 
6702 
6704  : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
6705  {
6706  }
6707 
6708 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6711  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_,
6713  const void * pNext_ = nullptr )
6714  : pNext( pNext_ )
6715  , accelerationStructure( accelerationStructure_ )
6716  , memory( memory_ )
6717  , memoryOffset( memoryOffset_ )
6718  , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
6719  , pDeviceIndices( deviceIndices_.data() )
6720  {
6721  }
6722 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6723 
6725 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6726 
6728  {
6729  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
6730  return *this;
6731  }
6732 
6733 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6735  {
6736  pNext = pNext_;
6737  return *this;
6738  }
6739 
6742  {
6743  accelerationStructure = accelerationStructure_;
6744  return *this;
6745  }
6746 
6748  {
6749  memory = memory_;
6750  return *this;
6751  }
6752 
6754  {
6755  memoryOffset = memoryOffset_;
6756  return *this;
6757  }
6758 
6760  {
6761  deviceIndexCount = deviceIndexCount_;
6762  return *this;
6763  }
6764 
6766  {
6767  pDeviceIndices = pDeviceIndices_;
6768  return *this;
6769  }
6770 
6771 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6774  {
6775  deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
6776  pDeviceIndices = deviceIndices_.data();
6777  return *this;
6778  }
6779 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6780 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6781 
6783  {
6784  return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( this );
6785  }
6786 
6788  {
6789  return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>( this );
6790  }
6791 
6792 #if defined( VULKAN_HPP_USE_REFLECT )
6793 # if 14 <= VULKAN_HPP_CPP_VERSION
6794  auto
6795 # else
6796  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
6797  const void * const &,
6801  uint32_t const &,
6802  const uint32_t * const &>
6803 # endif
6804  reflect() const VULKAN_HPP_NOEXCEPT
6805  {
6807  }
6808 #endif
6809 
6810 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6811  auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
6812 #else
6814  {
6815 # if defined( VULKAN_HPP_USE_REFLECT )
6816  return this->reflect() == rhs.reflect();
6817 # else
6818  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) &&
6819  ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
6820 # endif
6821  }
6822 
6824  {
6825  return !operator==( rhs );
6826  }
6827 #endif
6828 
6829  public:
6831  const void * pNext = {};
6835  uint32_t deviceIndexCount = {};
6836  const uint32_t * pDeviceIndices = {};
6837  };
6838 
6839  template <>
6841  {
6843  };
6844 
6846  {
6848 
6849  static const bool allowDuplicate = false;
6851 
6852 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6853  VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
6854  const uint32_t * pDeviceIndices_ = {},
6855  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
6856  : pNext( pNext_ )
6857  , deviceIndexCount( deviceIndexCount_ )
6858  , pDeviceIndices( pDeviceIndices_ )
6859  {
6860  }
6861 
6863 
6865  : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
6866  {
6867  }
6868 
6869 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6871  : pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
6872  {
6873  }
6874 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6875 
6877 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6878 
6880  {
6881  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
6882  return *this;
6883  }
6884 
6885 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
6887  {
6888  pNext = pNext_;
6889  return *this;
6890  }
6891 
6893  {
6894  deviceIndexCount = deviceIndexCount_;
6895  return *this;
6896  }
6897 
6899  {
6900  pDeviceIndices = pDeviceIndices_;
6901  return *this;
6902  }
6903 
6904 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
6907  {
6908  deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
6909  pDeviceIndices = deviceIndices_.data();
6910  return *this;
6911  }
6912 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6913 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
6914 
6916  {
6917  return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
6918  }
6919 
6921  {
6922  return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
6923  }
6924 
6925 #if defined( VULKAN_HPP_USE_REFLECT )
6926 # if 14 <= VULKAN_HPP_CPP_VERSION
6927  auto
6928 # else
6929  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
6930 # endif
6931  reflect() const VULKAN_HPP_NOEXCEPT
6932  {
6933  return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
6934  }
6935 #endif
6936 
6937 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
6938  auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
6939 #else
6941  {
6942 # if defined( VULKAN_HPP_USE_REFLECT )
6943  return this->reflect() == rhs.reflect();
6944 # else
6945  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
6946 # endif
6947  }
6948 
6950  {
6951  return !operator==( rhs );
6952  }
6953 #endif
6954 
6955  public:
6957  const void * pNext = {};
6958  uint32_t deviceIndexCount = {};
6959  const uint32_t * pDeviceIndices = {};
6960  };
6961 
6962  template <>
6964  {
6966  };
6968 
6969  struct BindBufferMemoryInfo
6970  {
6971  using NativeType = VkBindBufferMemoryInfo;
6972 
6973  static const bool allowDuplicate = false;
6974  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
6975 
6976 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
6979  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
6980  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
6981  : pNext( pNext_ )
6982  , buffer( buffer_ )
6983  , memory( memory_ )
6984  , memoryOffset( memoryOffset_ )
6985  {
6986  }
6987 
6989 
6991  : BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
6992  {
6993  }
6994 
6996 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
6997 
6999  {
7000  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
7001  return *this;
7002  }
7003 
7004 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7006  {
7007  pNext = pNext_;
7008  return *this;
7009  }
7010 
7012  {
7013  buffer = buffer_;
7014  return *this;
7015  }
7016 
7018  {
7019  memory = memory_;
7020  return *this;
7021  }
7022 
7024  {
7025  memoryOffset = memoryOffset_;
7026  return *this;
7027  }
7028 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7029 
7031  {
7032  return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
7033  }
7034 
7036  {
7037  return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
7038  }
7039 
7040 #if defined( VULKAN_HPP_USE_REFLECT )
7041 # if 14 <= VULKAN_HPP_CPP_VERSION
7042  auto
7043 # else
7044  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
7045  const void * const &,
7049 # endif
7050  reflect() const VULKAN_HPP_NOEXCEPT
7051  {
7052  return std::tie( sType, pNext, buffer, memory, memoryOffset );
7053  }
7054 #endif
7055 
7056 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7057  auto operator<=>( BindBufferMemoryInfo const & ) const = default;
7058 #else
7060  {
7061 # if defined( VULKAN_HPP_USE_REFLECT )
7062  return this->reflect() == rhs.reflect();
7063 # else
7064  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
7065 # endif
7066  }
7067 
7069  {
7070  return !operator==( rhs );
7071  }
7072 #endif
7073 
7074  public:
7076  const void * pNext = {};
7080  };
7081 
7082  template <>
7084  {
7086  };
7088 
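 // Usage sketch (editorial note, not part of the generated header): binding through
 // the vkBindBufferMemory2 path; `device`, `buffer` and `memory` are assumed handles.
 //
 //   vk::BindBufferMemoryInfo bindInfo{ buffer, memory, 0 /*memoryOffset*/ };
 //   device.bindBufferMemory2( bindInfo );   // single-element batch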
7089  struct Offset2D
7090  {
7091  using NativeType = VkOffset2D;
7092 
7093 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7094  VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
7095  : x( x_ )
7096  , y( y_ )
7097  {
7098  }
7099 
7100  VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
7101 
7102  Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) ) {}
7103 
7104  Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
7105 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7106 
7108  {
7109  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
7110  return *this;
7111  }
7112 
7113 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7115  {
7116  x = x_;
7117  return *this;
7118  }
7119 
7121  {
7122  y = y_;
7123  return *this;
7124  }
7125 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7126 
7127  operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
7128  {
7129  return *reinterpret_cast<const VkOffset2D *>( this );
7130  }
7131 
7133  {
7134  return *reinterpret_cast<VkOffset2D *>( this );
7135  }
7136 
7137 #if defined( VULKAN_HPP_USE_REFLECT )
7138 # if 14 <= VULKAN_HPP_CPP_VERSION
7139  auto
7140 # else
7141  std::tuple<int32_t const &, int32_t const &>
7142 # endif
7143  reflect() const VULKAN_HPP_NOEXCEPT
7144  {
7145  return std::tie( x, y );
7146  }
7147 #endif
7148 
7149 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7150  auto operator<=>( Offset2D const & ) const = default;
7151 #else
7152  bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
7153  {
7154 # if defined( VULKAN_HPP_USE_REFLECT )
7155  return this->reflect() == rhs.reflect();
7156 # else
7157  return ( x == rhs.x ) && ( y == rhs.y );
7158 # endif
7159  }
7160 
7161  bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
7162  {
7163  return !operator==( rhs );
7164  }
7165 #endif
7166 
7167  public:
7168  int32_t x = {};
7169  int32_t y = {};
7170  };
7171 
7172  struct Rect2D
7173  {
7174  using NativeType = VkRect2D;
7175 
7176 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7178  : offset( offset_ )
7179  , extent( extent_ )
7180  {
7181  }
7182 
7183  VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
7184 
7185  Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) ) {}
7186 
7187  Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
7188 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7189 
7191  {
7192  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
7193  return *this;
7194  }
7195 
7196 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7198  {
7199  offset = offset_;
7200  return *this;
7201  }
7202 
7204  {
7205  extent = extent_;
7206  return *this;
7207  }
7208 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7209 
7210  operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
7211  {
7212  return *reinterpret_cast<const VkRect2D *>( this );
7213  }
7214 
7216  {
7217  return *reinterpret_cast<VkRect2D *>( this );
7218  }
7219 
7220 #if defined( VULKAN_HPP_USE_REFLECT )
7221 # if 14 <= VULKAN_HPP_CPP_VERSION
7222  auto
7223 # else
7224  std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
7225 # endif
7226  reflect() const VULKAN_HPP_NOEXCEPT
7227  {
7228  return std::tie( offset, extent );
7229  }
7230 #endif
7231 
7232 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7233  auto operator<=>( Rect2D const & ) const = default;
7234 #else
7235  bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
7236  {
7237 # if defined( VULKAN_HPP_USE_REFLECT )
7238  return this->reflect() == rhs.reflect();
7239 # else
7240  return ( offset == rhs.offset ) && ( extent == rhs.extent );
7241 # endif
7242  }
7243 
7244  bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
7245  {
7246  return !operator==( rhs );
7247  }
7248 #endif
7249 
7250  public:
7253  };
7254 
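 // Usage sketch (editorial note, not part of the generated header): Offset2D,
 // Extent2D and Rect2D compose directly, e.g. for a dynamic scissor;
 // `commandBuffer` and the 1280x720 extent are placeholders.
 //
 //   vk::Rect2D scissor{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 1280, 720 } };
 //   commandBuffer.setScissor( 0 /*firstScissor*/, scissor );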
7256  {
7258 
7259  static const bool allowDuplicate = false;
7261 
7262 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7263  VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
7264  const uint32_t * pDeviceIndices_ = {},
7265  uint32_t splitInstanceBindRegionCount_ = {},
7266  const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {},
7267  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
7268  : pNext( pNext_ )
7269  , deviceIndexCount( deviceIndexCount_ )
7270  , pDeviceIndices( pDeviceIndices_ )
7271  , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
7272  , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
7273  {
7274  }
7275 
7277 
7279  : BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
7280  {
7281  }
7282 
7283 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
7286  const void * pNext_ = nullptr )
7287  : pNext( pNext_ )
7288  , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
7289  , pDeviceIndices( deviceIndices_.data() )
7290  , splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
7291  , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
7292  {
7293  }
7294 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
7295 
7297 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7298 
7300  {
7301  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
7302  return *this;
7303  }
7304 
7305 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7307  {
7308  pNext = pNext_;
7309  return *this;
7310  }
7311 
7313  {
7314  deviceIndexCount = deviceIndexCount_;
7315  return *this;
7316  }
7317 
7319  {
7320  pDeviceIndices = pDeviceIndices_;
7321  return *this;
7322  }
7323 
7324 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
7327  {
7328  deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
7329  pDeviceIndices = deviceIndices_.data();
7330  return *this;
7331  }
7332 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
7333 
7335  {
7336  splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
7337  return *this;
7338  }
7339 
7342  {
7343  pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
7344  return *this;
7345  }
7346 
7347 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
7350  {
7351  splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
7352  pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
7353  return *this;
7354  }
7355 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
7356 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7357 
7359  {
7360  return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
7361  }
7362 
7364  {
7365  return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
7366  }
7367 
7368 #if defined( VULKAN_HPP_USE_REFLECT )
7369 # if 14 <= VULKAN_HPP_CPP_VERSION
7370  auto
7371 # else
7372  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
7373  const void * const &,
7374  uint32_t const &,
7375  const uint32_t * const &,
7376  uint32_t const &,
7377  const VULKAN_HPP_NAMESPACE::Rect2D * const &>
7378 # endif
7379  reflect() const VULKAN_HPP_NOEXCEPT
7380  {
7382  }
7383 #endif
7384 
7385 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7386  auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
7387 #else
7389  {
7390 # if defined( VULKAN_HPP_USE_REFLECT )
7391  return this->reflect() == rhs.reflect();
7392 # else
7393  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) &&
7394  ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
7395 # endif
7396  }
7397 
7399  {
7400  return !operator==( rhs );
7401  }
7402 #endif
7403 
7404  public:
7406  const void * pNext = {};
7407  uint32_t deviceIndexCount = {};
7408  const uint32_t * pDeviceIndices = {};
7411  };
7412 
7413  template <>
7415  {
7417  };
7419 
7420  struct BindImageMemoryInfo
7421  {
7422  using NativeType = VkBindImageMemoryInfo;
7423 
7424  static const bool allowDuplicate = false;
7425  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
7426 
7427 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7430  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
7431  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
7432  : pNext( pNext_ )
7433  , image( image_ )
7434  , memory( memory_ )
7435  , memoryOffset( memoryOffset_ )
7436  {
7437  }
7438 
7440 
7441  BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
7442  {
7443  }
7444 
7446 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7447 
7449  {
7450  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
7451  return *this;
7452  }
7453 
7454 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7456  {
7457  pNext = pNext_;
7458  return *this;
7459  }
7460 
7462  {
7463  image = image_;
7464  return *this;
7465  }
7466 
7468  {
7469  memory = memory_;
7470  return *this;
7471  }
7472 
7474  {
7475  memoryOffset = memoryOffset_;
7476  return *this;
7477  }
7478 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7479 
7481  {
7482  return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
7483  }
7484 
7486  {
7487  return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
7488  }
7489 
7490 #if defined( VULKAN_HPP_USE_REFLECT )
7491 # if 14 <= VULKAN_HPP_CPP_VERSION
7492  auto
7493 # else
7494  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
7495  const void * const &,
7499 # endif
7500  reflect() const VULKAN_HPP_NOEXCEPT
7501  {
7502  return std::tie( sType, pNext, image, memory, memoryOffset );
7503  }
7504 #endif
7505 
7506 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7507  auto operator<=>( BindImageMemoryInfo const & ) const = default;
7508 #else
7510  {
7511 # if defined( VULKAN_HPP_USE_REFLECT )
7512  return this->reflect() == rhs.reflect();
7513 # else
7514  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
7515 # endif
7516  }
7517 
7519  {
7520  return !operator==( rhs );
7521  }
7522 #endif
7523 
7524  public:
7526  const void * pNext = {};
7530  };
7531 
7532  template <>
7534  {
7536  };
7538 
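 // Usage sketch (editorial note, not part of the generated header): the image
 // analogue of BindBufferMemoryInfo; `device`, `image` and `memory` are assumed handles.
 //
 //   vk::BindImageMemoryInfo bindInfo{ image, memory, 0 /*memoryOffset*/ };
 //   device.bindImageMemory2( bindInfo );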
7539  struct BindImageMemorySwapchainInfoKHR
7540  {
7541  using NativeType = VkBindImageMemorySwapchainInfoKHR;
7542 
7543  static const bool allowDuplicate = false;
7544  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
7545 
7546 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7548  uint32_t imageIndex_ = {},
7549  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
7550  : pNext( pNext_ )
7551  , swapchain( swapchain_ )
7552  , imageIndex( imageIndex_ )
7553  {
7554  }
7555 
7557 
7559  : BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
7560  {
7561  }
7562 
7564 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7565 
7567  {
7568  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
7569  return *this;
7570  }
7571 
7572 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7574  {
7575  pNext = pNext_;
7576  return *this;
7577  }
7578 
7580  {
7581  swapchain = swapchain_;
7582  return *this;
7583  }
7584 
7586  {
7587  imageIndex = imageIndex_;
7588  return *this;
7589  }
7590 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7591 
7593  {
7594  return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
7595  }
7596 
7598  {
7599  return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
7600  }
7601 
7602 #if defined( VULKAN_HPP_USE_REFLECT )
7603 # if 14 <= VULKAN_HPP_CPP_VERSION
7604  auto
7605 # else
7606  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
7607 # endif
7608  reflect() const VULKAN_HPP_NOEXCEPT
7609  {
7610  return std::tie( sType, pNext, swapchain, imageIndex );
7611  }
7612 #endif
7613 
7614 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7615  auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
7616 #else
7618  {
7619 # if defined( VULKAN_HPP_USE_REFLECT )
7620  return this->reflect() == rhs.reflect();
7621 # else
7622  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex );
7623 # endif
7624  }
7625 
7627  {
7628  return !operator==( rhs );
7629  }
7630 #endif
7631 
7632  public:
7634  const void * pNext = {};
7636  uint32_t imageIndex = {};
7637  };
7638 
7639  template <>
7641  {
7643  };
7644 
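 // Usage sketch (editorial note, not part of the generated header): binding an image
 // to swapchain-owned memory by chaining this structure into BindImageMemoryInfo;
 // `device`, `image`, `swapchain` and `imageIndex` are assumed.  The memory handle in
 // BindImageMemoryInfo stays null because the backing memory comes from the swapchain.
 //
 //   vk::BindImageMemorySwapchainInfoKHR swapchainBind{ swapchain, imageIndex };
 //   vk::BindImageMemoryInfo bindInfo{ image, {}, 0, &swapchainBind };
 //   device.bindImageMemory2( bindInfo );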
7646  {
7648 
7649  static const bool allowDuplicate = false;
7651 
7652 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7654  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
7655  : pNext( pNext_ )
7656  , planeAspect( planeAspect_ )
7657  {
7658  }
7659 
7661 
7663  : BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
7664  {
7665  }
7666 
7668 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7669 
7671  {
7672  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
7673  return *this;
7674  }
7675 
7676 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7678  {
7679  pNext = pNext_;
7680  return *this;
7681  }
7682 
7684  {
7685  planeAspect = planeAspect_;
7686  return *this;
7687  }
7688 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7689 
7691  {
7692  return *reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
7693  }
7694 
7696  {
7697  return *reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
7698  }
7699 
7700 #if defined( VULKAN_HPP_USE_REFLECT )
7701 # if 14 <= VULKAN_HPP_CPP_VERSION
7702  auto
7703 # else
7704  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
7705 # endif
7706  reflect() const VULKAN_HPP_NOEXCEPT
7707  {
7708  return std::tie( sType, pNext, planeAspect );
7709  }
7710 #endif
7711 
7712 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7713  auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
7714 #else
7716  {
7717 # if defined( VULKAN_HPP_USE_REFLECT )
7718  return this->reflect() == rhs.reflect();
7719 # else
7720  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
7721 # endif
7722  }
7723 
7725  {
7726  return !operator==( rhs );
7727  }
7728 #endif
7729 
7730  public:
7732  const void * pNext = {};
7734  };
7735 
7736  template <>
7738  {
7740  };
7742 
7744  {
7746 
7747 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7750  uint32_t size_ = {},
7752  : bufferAddress( bufferAddress_ )
7753  , size( size_ )
7754  , indexType( indexType_ )
7755  {
7756  }
7757 
7759 
7761  : BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
7762  {
7763  }
7764 
7766 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7767 
7769  {
7770  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
7771  return *this;
7772  }
7773 
7774 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7776  {
7777  bufferAddress = bufferAddress_;
7778  return *this;
7779  }
7780 
7782  {
7783  size = size_;
7784  return *this;
7785  }
7786 
7788  {
7789  indexType = indexType_;
7790  return *this;
7791  }
7792 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7793 
7795  {
7796  return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
7797  }
7798 
7800  {
7801  return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
7802  }
7803 
7804 #if defined( VULKAN_HPP_USE_REFLECT )
7805 # if 14 <= VULKAN_HPP_CPP_VERSION
7806  auto
7807 # else
7808  std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &>
7809 # endif
7810  reflect() const VULKAN_HPP_NOEXCEPT
7811  {
7812  return std::tie( bufferAddress, size, indexType );
7813  }
7814 #endif
7815 
7816 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7817  auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
7818 #else
7820  {
7821 # if defined( VULKAN_HPP_USE_REFLECT )
7822  return this->reflect() == rhs.reflect();
7823 # else
7824  return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
7825 # endif
7826  }
7827 
7829  {
7830  return !operator==( rhs );
7831  }
7832 #endif
7833 
7834  public:
7836  uint32_t size = {};
7838  };
7839 
7841  {
7843 
7844 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7846 
7848 
7850  : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
7851  {
7852  }
7853 
7855 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7856 
7858  {
7859  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
7860  return *this;
7861  }
7862 
7863 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7865  {
7866  groupIndex = groupIndex_;
7867  return *this;
7868  }
7869 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7870 
7872  {
7873  return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
7874  }
7875 
7877  {
7878  return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
7879  }
7880 
7881 #if defined( VULKAN_HPP_USE_REFLECT )
7882 # if 14 <= VULKAN_HPP_CPP_VERSION
7883  auto
7884 # else
7885  std::tuple<uint32_t const &>
7886 # endif
7887  reflect() const VULKAN_HPP_NOEXCEPT
7888  {
7889  return std::tie( groupIndex );
7890  }
7891 #endif
7892 
7893 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
7894  auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
7895 #else
7896  bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
7897  {
7898 # if defined( VULKAN_HPP_USE_REFLECT )
7899  return this->reflect() == rhs.reflect();
7900 # else
7901  return ( groupIndex == rhs.groupIndex );
7902 # endif
7903  }
7904 
7905  bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
7906  {
7907  return !operator==( rhs );
7908  }
7909 #endif
7910 
7911  public:
7912  uint32_t groupIndex = {};
7913  };
7914 
7915  struct SparseMemoryBind
7916  {
7917  using NativeType = VkSparseMemoryBind;
7918 
7919 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
7920  VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {},
7921  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
7922  VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
7923  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
7924  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
7925  : resourceOffset( resourceOffset_ )
7926  , size( size_ )
7927  , memory( memory_ )
7928  , memoryOffset( memoryOffset_ )
7929  , flags( flags_ )
7930  {
7931  }
7932 
7934 
7935  SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) ) {}
7936 
7938 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
7939 
7941  {
7942  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
7943  return *this;
7944  }
7945 
7946 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
7948  {
7949  resourceOffset = resourceOffset_;
7950  return *this;
7951  }
7952 
7954  {
7955  size = size_;
7956  return *this;
7957  }
7958 
7960  {
7961  memory = memory_;
7962  return *this;
7963  }
7964 
7966  {
7967  memoryOffset = memoryOffset_;
7968  return *this;
7969  }
7970 
7972  {
7973  flags = flags_;
7974  return *this;
7975  }
7976 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
7977 
7978  operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
7979  {
7980  return *reinterpret_cast<const VkSparseMemoryBind *>( this );
7981  }
7982 
7984  {
7985  return *reinterpret_cast<VkSparseMemoryBind *>( this );
7986  }
7987 
7988 #if defined( VULKAN_HPP_USE_REFLECT )
7989 # if 14 <= VULKAN_HPP_CPP_VERSION
7990  auto
7991 # else
7992  std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
7993  VULKAN_HPP_NAMESPACE::DeviceSize const &,
7994  VULKAN_HPP_NAMESPACE::DeviceMemory const &,
7995  VULKAN_HPP_NAMESPACE::DeviceSize const &,
7996  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
7997 # endif
7998  reflect() const VULKAN_HPP_NOEXCEPT
7999  {
8000  return std::tie( resourceOffset, size, memory, memoryOffset, flags );
8001  }
8002 #endif
8003 
8004 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8005  auto operator<=>( SparseMemoryBind const & ) const = default;
8006 #else
8007  bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
8008  {
8009 # if defined( VULKAN_HPP_USE_REFLECT )
8010  return this->reflect() == rhs.reflect();
8011 # else
8012  return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) &&
8013  ( flags == rhs.flags );
8014 # endif
8015  }
8016 
8017  bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
8018  {
8019  return !operator==( rhs );
8020  }
8021 #endif
8022 
8023  public:
8024  VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
8025  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
8026  VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
8027  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
8028  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
8029  };
8030 
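  // Usage sketch (illustrative, not part of the generated header): a SparseMemoryBind that maps
  // 64 KiB of `memory` to the start of a resource's opaque range, using the setters above. The
  // sizes and offsets are placeholders; real values come from the resource's memory requirements.
  inline VULKAN_HPP_NAMESPACE::SparseMemoryBind exampleOpaqueBind( VULKAN_HPP_NAMESPACE::DeviceMemory memory )
  {
    return VULKAN_HPP_NAMESPACE::SparseMemoryBind()
      .setResourceOffset( 0 )
      .setSize( 65536 )
      .setMemory( memory )
      .setMemoryOffset( 0 );
  }
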
8031  struct SparseBufferMemoryBindInfo
8032  {
8033  using NativeType = VkSparseBufferMemoryBindInfo;
8034 
8035 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8036  VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
8037  uint32_t bindCount_ = {},
8038  const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
8039  : buffer( buffer_ )
8040  , bindCount( bindCount_ )
8041  , pBinds( pBinds_ )
8042  {
8043  }
8044 
8046 
8048  : SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
8049  {
8050  }
8051 
8052 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8055  : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
8056  {
8057  }
8058 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8059 
8061 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8062 
8064  {
8065  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
8066  return *this;
8067  }
8068 
8069 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8071  {
8072  buffer = buffer_;
8073  return *this;
8074  }
8075 
8077  {
8078  bindCount = bindCount_;
8079  return *this;
8080  }
8081 
8083  {
8084  pBinds = pBinds_;
8085  return *this;
8086  }
8087 
8088 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8091  {
8092  bindCount = static_cast<uint32_t>( binds_.size() );
8093  pBinds = binds_.data();
8094  return *this;
8095  }
8096 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8097 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8098 
8100  {
8101  return *reinterpret_cast<const VkSparseBufferMemoryBindInfo *>( this );
8102  }
8103 
8105  {
8106  return *reinterpret_cast<VkSparseBufferMemoryBindInfo *>( this );
8107  }
8108 
8109 #if defined( VULKAN_HPP_USE_REFLECT )
8110 # if 14 <= VULKAN_HPP_CPP_VERSION
8111  auto
8112 # else
8113  std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
8114 # endif
8115  reflect() const VULKAN_HPP_NOEXCEPT
8116  {
8117  return std::tie( buffer, bindCount, pBinds );
8118  }
8119 #endif
8120 
8121 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8122  auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
8123 #else
8124  bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8125  {
8126 # if defined( VULKAN_HPP_USE_REFLECT )
8127  return this->reflect() == rhs.reflect();
8128 # else
8129  return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
8130 # endif
8131  }
8132 
8133  bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8134  {
8135  return !operator==( rhs );
8136  }
8137 #endif
8138 
8139  public:
8140  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
8141  uint32_t bindCount = {};
8142  const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
8143  };
8144 
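  // Usage sketch (illustrative, not part of the generated header): grouping SparseMemoryBind
  // entries for one buffer with the enhanced-mode ArrayProxyNoTemporaries constructor above,
  // which fills bindCount / pBinds from the proxy. The caller must keep the bind array alive
  // for as long as the returned struct is used.
  inline VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo
    exampleBufferBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer,
                           VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds )
  {
    return VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo( buffer, binds );
  }
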
8145  struct SparseImageOpaqueMemoryBindInfo
8146  {
8147  using NativeType = VkSparseImageOpaqueMemoryBindInfo;
8148 
8149 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8150  VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
8151  uint32_t bindCount_ = {},
8152  const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
8153  : image( image_ )
8154  , bindCount( bindCount_ )
8155  , pBinds( pBinds_ )
8156  {
8157  }
8158 
8160 
8162  : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
8163  {
8164  }
8165 
8166 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8169  : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
8170  {
8171  }
8172 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8173 
8175 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8176 
8178  {
8179  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
8180  return *this;
8181  }
8182 
8183 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8185  {
8186  image = image_;
8187  return *this;
8188  }
8189 
8191  {
8192  bindCount = bindCount_;
8193  return *this;
8194  }
8195 
8197  {
8198  pBinds = pBinds_;
8199  return *this;
8200  }
8201 
8202 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8205  {
8206  bindCount = static_cast<uint32_t>( binds_.size() );
8207  pBinds = binds_.data();
8208  return *this;
8209  }
8210 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8211 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8212 
8214  {
8215  return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo *>( this );
8216  }
8217 
8219  {
8220  return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>( this );
8221  }
8222 
8223 #if defined( VULKAN_HPP_USE_REFLECT )
8224 # if 14 <= VULKAN_HPP_CPP_VERSION
8225  auto
8226 # else
8227  std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
8228 # endif
8229  reflect() const VULKAN_HPP_NOEXCEPT
8230  {
8231  return std::tie( image, bindCount, pBinds );
8232  }
8233 #endif
8234 
8235 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8236  auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
8237 #else
8238  bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8239  {
8240 # if defined( VULKAN_HPP_USE_REFLECT )
8241  return this->reflect() == rhs.reflect();
8242 # else
8243  return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
8244 # endif
8245  }
8246 
8247  bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8248  {
8249  return !operator==( rhs );
8250  }
8251 #endif
8252 
8253  public:
8254  VULKAN_HPP_NAMESPACE::Image image = {};
8255  uint32_t bindCount = {};
8256  const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
8257  };
8258 
8259  struct ImageSubresource
8260  {
8261  using NativeType = VkImageSubresource;
8262 
8263 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8264  VULKAN_HPP_CONSTEXPR
8265  ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
8266  : aspectMask( aspectMask_ )
8267  , mipLevel( mipLevel_ )
8268  , arrayLayer( arrayLayer_ )
8269  {
8270  }
8271 
8273 
8274  ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) ) {}
8275 
8277 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8278 
8280  {
8281  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
8282  return *this;
8283  }
8284 
8285 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8287  {
8288  aspectMask = aspectMask_;
8289  return *this;
8290  }
8291 
8293  {
8294  mipLevel = mipLevel_;
8295  return *this;
8296  }
8297 
8299  {
8300  arrayLayer = arrayLayer_;
8301  return *this;
8302  }
8303 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8304 
8305  operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
8306  {
8307  return *reinterpret_cast<const VkImageSubresource *>( this );
8308  }
8309 
8311  {
8312  return *reinterpret_cast<VkImageSubresource *>( this );
8313  }
8314 
8315 #if defined( VULKAN_HPP_USE_REFLECT )
8316 # if 14 <= VULKAN_HPP_CPP_VERSION
8317  auto
8318 # else
8319  std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &>
8320 # endif
8321  reflect() const VULKAN_HPP_NOEXCEPT
8322  {
8323  return std::tie( aspectMask, mipLevel, arrayLayer );
8324  }
8325 #endif
8326 
8327 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8328  auto operator<=>( ImageSubresource const & ) const = default;
8329 #else
8330  bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
8331  {
8332 # if defined( VULKAN_HPP_USE_REFLECT )
8333  return this->reflect() == rhs.reflect();
8334 # else
8335  return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
8336 # endif
8337  }
8338 
8339  bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
8340  {
8341  return !operator==( rhs );
8342  }
8343 #endif
8344 
8345  public:
8346  VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
8347  uint32_t mipLevel = {};
8348  uint32_t arrayLayer = {};
8349  };
8350 
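  // Usage sketch (illustrative, not part of the generated header): selecting one subresource of
  // a color image, e.g. for Device::getImageSubresourceLayout on a linearly tiled image.
  inline VULKAN_HPP_NAMESPACE::ImageSubresource exampleColorSubresource( uint32_t mipLevel, uint32_t arrayLayer )
  {
    return VULKAN_HPP_NAMESPACE::ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, mipLevel, arrayLayer );
  }
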
8351  struct Offset3D
8352  {
8353  using NativeType = VkOffset3D;
8354 
8355 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8356  VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
8357  : x( x_ )
8358  , y( y_ )
8359  , z( z_ )
8360  {
8361  }
8362 
8363  VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
8364 
8365  Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}
8366 
8367  explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
8368 
8369  Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
8370 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8371 
8373  {
8374  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
8375  return *this;
8376  }
8377 
8378 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8380  {
8381  x = x_;
8382  return *this;
8383  }
8384 
8386  {
8387  y = y_;
8388  return *this;
8389  }
8390 
8392  {
8393  z = z_;
8394  return *this;
8395  }
8396 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8397 
8398  operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
8399  {
8400  return *reinterpret_cast<const VkOffset3D *>( this );
8401  }
8402 
8404  {
8405  return *reinterpret_cast<VkOffset3D *>( this );
8406  }
8407 
8408 #if defined( VULKAN_HPP_USE_REFLECT )
8409 # if 14 <= VULKAN_HPP_CPP_VERSION
8410  auto
8411 # else
8412  std::tuple<int32_t const &, int32_t const &, int32_t const &>
8413 # endif
8414  reflect() const VULKAN_HPP_NOEXCEPT
8415  {
8416  return std::tie( x, y, z );
8417  }
8418 #endif
8419 
8420 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8421  auto operator<=>( Offset3D const & ) const = default;
8422 #else
8423  bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
8424  {
8425 # if defined( VULKAN_HPP_USE_REFLECT )
8426  return this->reflect() == rhs.reflect();
8427 # else
8428  return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
8429 # endif
8430  }
8431 
8432  bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
8433  {
8434  return !operator==( rhs );
8435  }
8436 #endif
8437 
8438  public:
8439  int32_t x = {};
8440  int32_t y = {};
8441  int32_t z = {};
8442  };
8443 
8444  struct Extent3D
8445  {
8446  using NativeType = VkExtent3D;
8447 
8448 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8449  VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
8450  : width( width_ )
8451  , height( height_ )
8452  , depth( depth_ )
8453  {
8454  }
8455 
8456  VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
8457 
8458  Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
8459 
8460  explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {}
8461 
8462  Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
8463 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8464 
8466  {
8467  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
8468  return *this;
8469  }
8470 
8471 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8473  {
8474  width = width_;
8475  return *this;
8476  }
8477 
8479  {
8480  height = height_;
8481  return *this;
8482  }
8483 
8485  {
8486  depth = depth_;
8487  return *this;
8488  }
8489 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8490 
8491  operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
8492  {
8493  return *reinterpret_cast<const VkExtent3D *>( this );
8494  }
8495 
8497  {
8498  return *reinterpret_cast<VkExtent3D *>( this );
8499  }
8500 
8501 #if defined( VULKAN_HPP_USE_REFLECT )
8502 # if 14 <= VULKAN_HPP_CPP_VERSION
8503  auto
8504 # else
8505  std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
8506 # endif
8507  reflect() const VULKAN_HPP_NOEXCEPT
8508  {
8509  return std::tie( width, height, depth );
8510  }
8511 #endif
8512 
8513 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8514  auto operator<=>( Extent3D const & ) const = default;
8515 #else
8516  bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
8517  {
8518 # if defined( VULKAN_HPP_USE_REFLECT )
8519  return this->reflect() == rhs.reflect();
8520 # else
8521  return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
8522 # endif
8523  }
8524 
8525  bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
8526  {
8527  return !operator==( rhs );
8528  }
8529 #endif
8530 
8531  public:
8532  uint32_t width = {};
8533  uint32_t height = {};
8534  uint32_t depth = {};
8535  };
8536 
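  // Usage sketch (illustrative, not part of the generated header): Offset3D and Extent3D can be
  // widened from their 2D counterparts with the explicit constructors above; a depth of 1 is the
  // usual choice for 2D images.
  inline VULKAN_HPP_NAMESPACE::Extent3D exampleWholeImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent2D )
  {
    return VULKAN_HPP_NAMESPACE::Extent3D( extent2D, 1 );  // width/height copied, depth = 1
  }
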
8537  struct SparseImageMemoryBind
8538  {
8539  using NativeType = VkSparseImageMemoryBind;
8540 
8541 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8542  VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
8543  VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
8544  VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
8545  VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
8546  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
8547  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
8548  : subresource( subresource_ )
8549  , offset( offset_ )
8550  , extent( extent_ )
8551  , memory( memory_ )
8552  , memoryOffset( memoryOffset_ )
8553  , flags( flags_ )
8554  {
8555  }
8556 
8558 
8560  : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
8561  {
8562  }
8563 
8565 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8566 
8568  {
8569  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
8570  return *this;
8571  }
8572 
8573 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8575  {
8576  subresource = subresource_;
8577  return *this;
8578  }
8579 
8581  {
8582  offset = offset_;
8583  return *this;
8584  }
8585 
8587  {
8588  extent = extent_;
8589  return *this;
8590  }
8591 
8593  {
8594  memory = memory_;
8595  return *this;
8596  }
8597 
8599  {
8600  memoryOffset = memoryOffset_;
8601  return *this;
8602  }
8603 
8605  {
8606  flags = flags_;
8607  return *this;
8608  }
8609 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8610 
8612  {
8613  return *reinterpret_cast<const VkSparseImageMemoryBind *>( this );
8614  }
8615 
8617  {
8618  return *reinterpret_cast<VkSparseImageMemoryBind *>( this );
8619  }
8620 
8621 #if defined( VULKAN_HPP_USE_REFLECT )
8622 # if 14 <= VULKAN_HPP_CPP_VERSION
8623  auto
8624 # else
8625  std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresource const &,
8626  VULKAN_HPP_NAMESPACE::Offset3D const &,
8627  VULKAN_HPP_NAMESPACE::Extent3D const &,
8628  VULKAN_HPP_NAMESPACE::DeviceMemory const &,
8629  VULKAN_HPP_NAMESPACE::DeviceSize const &,
8630  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
8631 # endif
8632  reflect() const VULKAN_HPP_NOEXCEPT
8633  {
8634  return std::tie( subresource, offset, extent, memory, memoryOffset, flags );
8635  }
8636 #endif
8637 
8638 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8639  auto operator<=>( SparseImageMemoryBind const & ) const = default;
8640 #else
8641  bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
8642  {
8643 # if defined( VULKAN_HPP_USE_REFLECT )
8644  return this->reflect() == rhs.reflect();
8645 # else
8646  return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) &&
8647  ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
8648 # endif
8649  }
8650 
8651  bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
8652  {
8653  return !operator==( rhs );
8654  }
8655 #endif
8656 
8657  public:
8658  VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
8659  VULKAN_HPP_NAMESPACE::Offset3D offset = {};
8660  VULKAN_HPP_NAMESPACE::Extent3D extent = {};
8661  VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
8662  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
8663  VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
8664  };
8665 
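  // Usage sketch (illustrative, not part of the generated header): binding `memory` to one block
  // of mip level 0, array layer 0 of a sparsely resident color image. `blockExtent` would
  // normally be the sparse image block size reported by Device::getImageSparseMemoryRequirements.
  inline VULKAN_HPP_NAMESPACE::SparseImageMemoryBind exampleImageBlockBind( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                            VULKAN_HPP_NAMESPACE::Extent3D     blockExtent )
  {
    return VULKAN_HPP_NAMESPACE::SparseImageMemoryBind()
      .setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0 ) )
      .setOffset( VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ) )
      .setExtent( blockExtent )
      .setMemory( memory )
      .setMemoryOffset( 0 );
  }
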
8666  struct SparseImageMemoryBindInfo
8667  {
8668  using NativeType = VkSparseImageMemoryBindInfo;
8669 
8670 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8671  VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
8672  uint32_t bindCount_ = {},
8673  const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
8674  : image( image_ )
8675  , bindCount( bindCount_ )
8676  , pBinds( pBinds_ )
8677  {
8678  }
8679 
8681 
8683  : SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
8684  {
8685  }
8686 
8687 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8690  : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
8691  {
8692  }
8693 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8694 
8696 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8697 
8699  {
8700  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
8701  return *this;
8702  }
8703 
8704 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8706  {
8707  image = image_;
8708  return *this;
8709  }
8710 
8712  {
8713  bindCount = bindCount_;
8714  return *this;
8715  }
8716 
8718  {
8719  pBinds = pBinds_;
8720  return *this;
8721  }
8722 
8723 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8726  {
8727  bindCount = static_cast<uint32_t>( binds_.size() );
8728  pBinds = binds_.data();
8729  return *this;
8730  }
8731 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8732 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8733 
8735  {
8736  return *reinterpret_cast<const VkSparseImageMemoryBindInfo *>( this );
8737  }
8738 
8740  {
8741  return *reinterpret_cast<VkSparseImageMemoryBindInfo *>( this );
8742  }
8743 
8744 #if defined( VULKAN_HPP_USE_REFLECT )
8745 # if 14 <= VULKAN_HPP_CPP_VERSION
8746  auto
8747 # else
8748  std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
8749 # endif
8750  reflect() const VULKAN_HPP_NOEXCEPT
8751  {
8752  return std::tie( image, bindCount, pBinds );
8753  }
8754 #endif
8755 
8756 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
8757  auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
8758 #else
8759  bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8760  {
8761 # if defined( VULKAN_HPP_USE_REFLECT )
8762  return this->reflect() == rhs.reflect();
8763 # else
8764  return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
8765 # endif
8766  }
8767 
8768  bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
8769  {
8770  return !operator==( rhs );
8771  }
8772 #endif
8773 
8774  public:
8775  VULKAN_HPP_NAMESPACE::Image image = {};
8776  uint32_t bindCount = {};
8777  const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {};
8778  };
8779 
8780  struct BindSparseInfo
8781  {
8782  using NativeType = VkBindSparseInfo;
8783 
8784  static const bool allowDuplicate = false;
8785  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
8786 
8787 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
8788  VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
8789  const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
8790  uint32_t bufferBindCount_ = {},
8791  const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {},
8792  uint32_t imageOpaqueBindCount_ = {},
8793  const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {},
8794  uint32_t imageBindCount_ = {},
8795  const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {},
8796  uint32_t signalSemaphoreCount_ = {},
8797  const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {},
8798  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
8799  : pNext( pNext_ )
8800  , waitSemaphoreCount( waitSemaphoreCount_ )
8801  , pWaitSemaphores( pWaitSemaphores_ )
8802  , bufferBindCount( bufferBindCount_ )
8803  , pBufferBinds( pBufferBinds_ )
8804  , imageOpaqueBindCount( imageOpaqueBindCount_ )
8805  , pImageOpaqueBinds( pImageOpaqueBinds_ )
8806  , imageBindCount( imageBindCount_ )
8807  , pImageBinds( pImageBinds_ )
8808  , signalSemaphoreCount( signalSemaphoreCount_ )
8809  , pSignalSemaphores( pSignalSemaphores_ )
8810  {
8811  }
8812 
8814 
8815  BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) ) {}
8816 
8817 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8818  BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
8819  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {},
8820  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {},
8821  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {},
8822  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {},
8823  const void * pNext_ = nullptr )
8824  : pNext( pNext_ )
8825  , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
8826  , pWaitSemaphores( waitSemaphores_.data() )
8827  , bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) )
8828  , pBufferBinds( bufferBinds_.data() )
8829  , imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) )
8830  , pImageOpaqueBinds( imageOpaqueBinds_.data() )
8831  , imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) )
8832  , pImageBinds( imageBinds_.data() )
8833  , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
8834  , pSignalSemaphores( signalSemaphores_.data() )
8835  {
8836  }
8837 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8838 
8839  BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
8840 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
8841 
8843  {
8844  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
8845  return *this;
8846  }
8847 
8848 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
8850  {
8851  pNext = pNext_;
8852  return *this;
8853  }
8854 
8856  {
8857  waitSemaphoreCount = waitSemaphoreCount_;
8858  return *this;
8859  }
8860 
8862  {
8863  pWaitSemaphores = pWaitSemaphores_;
8864  return *this;
8865  }
8866 
8867 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8868  BindSparseInfo &
8870  {
8871  waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
8872  pWaitSemaphores = waitSemaphores_.data();
8873  return *this;
8874  }
8875 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8876 
8878  {
8879  bufferBindCount = bufferBindCount_;
8880  return *this;
8881  }
8882 
8884  {
8885  pBufferBinds = pBufferBinds_;
8886  return *this;
8887  }
8888 
8889 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8892  {
8893  bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
8894  pBufferBinds = bufferBinds_.data();
8895  return *this;
8896  }
8897 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8898 
8900  {
8901  imageOpaqueBindCount = imageOpaqueBindCount_;
8902  return *this;
8903  }
8904 
8907  {
8908  pImageOpaqueBinds = pImageOpaqueBinds_;
8909  return *this;
8910  }
8911 
8912 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8915  {
8916  imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
8917  pImageOpaqueBinds = imageOpaqueBinds_.data();
8918  return *this;
8919  }
8920 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8921 
8923  {
8924  imageBindCount = imageBindCount_;
8925  return *this;
8926  }
8927 
8929  {
8930  pImageBinds = pImageBinds_;
8931  return *this;
8932  }
8933 
8934 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8937  {
8938  imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
8939  pImageBinds = imageBinds_.data();
8940  return *this;
8941  }
8942 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8943 
8945  {
8946  signalSemaphoreCount = signalSemaphoreCount_;
8947  return *this;
8948  }
8949 
8951  {
8952  pSignalSemaphores = pSignalSemaphores_;
8953  return *this;
8954  }
8955 
8956 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
8957  BindSparseInfo &
8959  {
8960  signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
8961  pSignalSemaphores = signalSemaphores_.data();
8962  return *this;
8963  }
8964 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8965 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
8966 
8967  operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
8968  {
8969  return *reinterpret_cast<const VkBindSparseInfo *>( this );
8970  }
8971 
8973  {
8974  return *reinterpret_cast<VkBindSparseInfo *>( this );
8975  }
8976 
8977 #if defined( VULKAN_HPP_USE_REFLECT )
8978 # if 14 <= VULKAN_HPP_CPP_VERSION
8979  auto
8980 # else
8981  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
8982  const void * const &,
8983  uint32_t const &,
8984  const VULKAN_HPP_NAMESPACE::Semaphore * const &,
8985  uint32_t const &,
8986  const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * const &,
8987  uint32_t const &,
8988  const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * const &,
8989  uint32_t const &,
8990  const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * const &,
8991  uint32_t const &,
8992  const VULKAN_HPP_NAMESPACE::Semaphore * const &>
8993 # endif
8994  reflect() const VULKAN_HPP_NOEXCEPT
8995  {
8996  return std::tie( sType,
8997  pNext,
8998  waitSemaphoreCount,
8999  pWaitSemaphores,
9000  bufferBindCount,
9001  pBufferBinds,
9002  imageOpaqueBindCount,
9003  pImageOpaqueBinds,
9004  imageBindCount,
9005  pImageBinds,
9006  signalSemaphoreCount,
9007  pSignalSemaphores );
9008  }
9009 #endif
9010 
9011 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9012  auto operator<=>( BindSparseInfo const & ) const = default;
9013 #else
9014  bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
9015  {
9016 # if defined( VULKAN_HPP_USE_REFLECT )
9017  return this->reflect() == rhs.reflect();
9018 # else
9019  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
9020  ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) &&
9021  ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) &&
9022  ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
9023  ( pSignalSemaphores == rhs.pSignalSemaphores );
9024 # endif
9025  }
9026 
9027  bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
9028  {
9029  return !operator==( rhs );
9030  }
9031 #endif
9032 
9033  public:
9034  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
9035  const void * pNext = {};
9036  uint32_t waitSemaphoreCount = {};
9037  const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
9038  uint32_t bufferBindCount = {};
9039  const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {};
9040  uint32_t imageOpaqueBindCount = {};
9041  const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {};
9042  uint32_t imageBindCount = {};
9043  const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {};
9044  uint32_t signalSemaphoreCount = {};
9045  const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
9046  };
9047 
9048  template <>
9049  struct CppType<StructureType, StructureType::eBindSparseInfo>
9050  {
9051  using Type = BindSparseInfo;
9052  };
9053 
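  // Usage sketch (illustrative, not part of the generated header): assembling a BindSparseInfo
  // with the enhanced-mode constructor above and submitting it on a queue created with sparse
  // binding support. The handles are placeholders; error handling is omitted.
  inline void exampleQueueBindSparse( VULKAN_HPP_NAMESPACE::Queue                      queue,
                                      VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo bufferBindInfo,
                                      VULKAN_HPP_NAMESPACE::Semaphore                  signalSemaphore,
                                      VULKAN_HPP_NAMESPACE::Fence                      fence )
  {
    // no wait semaphores, one buffer bind, no image binds, signal `signalSemaphore` when done
    VULKAN_HPP_NAMESPACE::BindSparseInfo bindSparseInfo( {}, bufferBindInfo, {}, {}, signalSemaphore );
    queue.bindSparse( bindSparseInfo, fence );
  }
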
9054  struct BindVertexBufferIndirectCommandNV
9055  {
9056  using NativeType = VkBindVertexBufferIndirectCommandNV;
9057 
9058 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9059  VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
9060  uint32_t size_ = {},
9061  uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
9062  : bufferAddress( bufferAddress_ )
9063  , size( size_ )
9064  , stride( stride_ )
9065  {
9066  }
9067 
9069 
9071  : BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
9072  {
9073  }
9074 
9076 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9077 
9079  {
9080  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
9081  return *this;
9082  }
9083 
9084 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9086  {
9087  bufferAddress = bufferAddress_;
9088  return *this;
9089  }
9090 
9092  {
9093  size = size_;
9094  return *this;
9095  }
9096 
9098  {
9099  stride = stride_;
9100  return *this;
9101  }
9102 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9103 
9105  {
9106  return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>( this );
9107  }
9108 
9110  {
9111  return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>( this );
9112  }
9113 
9114 #if defined( VULKAN_HPP_USE_REFLECT )
9115 # if 14 <= VULKAN_HPP_CPP_VERSION
9116  auto
9117 # else
9118  std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
9119 # endif
9120  reflect() const VULKAN_HPP_NOEXCEPT
9121  {
9122  return std::tie( bufferAddress, size, stride );
9123  }
9124 #endif
9125 
9126 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9127  auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
9128 #else
9129  bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
9130  {
9131 # if defined( VULKAN_HPP_USE_REFLECT )
9132  return this->reflect() == rhs.reflect();
9133 # else
9134  return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride );
9135 # endif
9136  }
9137 
9138  bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
9139  {
9140  return !operator==( rhs );
9141  }
9142 #endif
9143 
9144  public:
9145  VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
9146  uint32_t size = {};
9147  uint32_t stride = {};
9148  };
9149 
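  // Usage sketch (illustrative, not part of the generated header): filling a
  // BindVertexBufferIndirectCommandNV token for VK_NV_device_generated_commands. Address, size
  // and stride (all in bytes) are hypothetical values.
  inline VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV
    exampleVertexBufferToken( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBufferAddress, uint32_t sizeInBytes, uint32_t strideInBytes )
  {
    return VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV( vertexBufferAddress, sizeInBytes, strideInBytes );
  }
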
9150 #if defined( VK_ENABLE_BETA_EXTENSIONS )
9151  struct BindVideoSessionMemoryInfoKHR
9152  {
9153  using NativeType = VkBindVideoSessionMemoryInfoKHR;
9154 
9155  static const bool allowDuplicate = false;
9156  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR;
9157 
9158 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9159  VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {},
9160  VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
9161  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
9162  VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {},
9163  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9164  : pNext( pNext_ )
9165  , memoryBindIndex( memoryBindIndex_ )
9166  , memory( memory_ )
9167  , memoryOffset( memoryOffset_ )
9168  , memorySize( memorySize_ )
9169  {
9170  }
9171 
9172  VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9173 
9174  BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
9175  : BindVideoSessionMemoryInfoKHR( *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs ) )
9176  {
9177  }
9178 
9179  BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9180 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9181 
9182  BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
9183  {
9184  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const *>( &rhs );
9185  return *this;
9186  }
9187 
9188 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9189  VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
9190  {
9191  pNext = pNext_;
9192  return *this;
9193  }
9194 
9195  VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
9196  {
9197  memoryBindIndex = memoryBindIndex_;
9198  return *this;
9199  }
9200 
9201  VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
9202  {
9203  memory = memory_;
9204  return *this;
9205  }
9206 
9207  VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
9208  {
9209  memoryOffset = memoryOffset_;
9210  return *this;
9211  }
9212 
9213  VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
9214  {
9215  memorySize = memorySize_;
9216  return *this;
9217  }
9218 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9219 
9220  operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
9221  {
9222  return *reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( this );
9223  }
9224 
9226  {
9227  return *reinterpret_cast<VkBindVideoSessionMemoryInfoKHR *>( this );
9228  }
9229 
9230 # if defined( VULKAN_HPP_USE_REFLECT )
9231 # if 14 <= VULKAN_HPP_CPP_VERSION
9232  auto
9233 # else
9234  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
9235  const void * const &,
9236  uint32_t const &,
9237  VULKAN_HPP_NAMESPACE::DeviceMemory const &,
9238  VULKAN_HPP_NAMESPACE::DeviceSize const &,
9239  VULKAN_HPP_NAMESPACE::DeviceSize const &>
9240 # endif
9241  reflect() const VULKAN_HPP_NOEXCEPT
9242  {
9243  return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
9244  }
9245 # endif
9246 
9247 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9248  auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default;
9249 # else
9250  bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
9251  {
9252 # if defined( VULKAN_HPP_USE_REFLECT )
9253  return this->reflect() == rhs.reflect();
9254 # else
9255  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) &&
9256  ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
9257 # endif
9258  }
9259 
9260  bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
9261  {
9262  return !operator==( rhs );
9263  }
9264 # endif
9265 
9266  public:
9267  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR;
9268  const void * pNext = {};
9269  uint32_t memoryBindIndex = {};
9270  VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
9271  VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
9272  VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
9273  };
9274 
9275  template <>
9276  struct CppType<StructureType, StructureType::eBindVideoSessionMemoryInfoKHR>
9277  {
9278  using Type = BindVideoSessionMemoryInfoKHR;
9279  };
9280 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
9281 
9282  struct ImageSubresourceLayers
9283  {
9284  using NativeType = VkImageSubresourceLayers;
9285 
9286 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9287  VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
9288  uint32_t mipLevel_ = {},
9289  uint32_t baseArrayLayer_ = {},
9290  uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
9291  : aspectMask( aspectMask_ )
9292  , mipLevel( mipLevel_ )
9293  , baseArrayLayer( baseArrayLayer_ )
9294  , layerCount( layerCount_ )
9295  {
9296  }
9297 
9299 
9301  : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
9302  {
9303  }
9304 
9306 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9307 
9309  {
9310  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
9311  return *this;
9312  }
9313 
9314 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9316  {
9317  aspectMask = aspectMask_;
9318  return *this;
9319  }
9320 
9322  {
9323  mipLevel = mipLevel_;
9324  return *this;
9325  }
9326 
9328  {
9329  baseArrayLayer = baseArrayLayer_;
9330  return *this;
9331  }
9332 
9334  {
9335  layerCount = layerCount_;
9336  return *this;
9337  }
9338 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9339 
9341  {
9342  return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
9343  }
9344 
9346  {
9347  return *reinterpret_cast<VkImageSubresourceLayers *>( this );
9348  }
9349 
9350 #if defined( VULKAN_HPP_USE_REFLECT )
9351 # if 14 <= VULKAN_HPP_CPP_VERSION
9352  auto
9353 # else
9354  std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
9355 # endif
9356  reflect() const VULKAN_HPP_NOEXCEPT
9357  {
9358  return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
9359  }
9360 #endif
9361 
9362 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9363  auto operator<=>( ImageSubresourceLayers const & ) const = default;
9364 #else
9365  bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
9366  {
9367 # if defined( VULKAN_HPP_USE_REFLECT )
9368  return this->reflect() == rhs.reflect();
9369 # else
9370  return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
9371 # endif
9372  }
9373 
9374  bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
9375  {
9376  return !operator==( rhs );
9377  }
9378 #endif
9379 
9380  public:
9381  VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
9382  uint32_t mipLevel = {};
9383  uint32_t baseArrayLayer = {};
9384  uint32_t layerCount = {};
9385  };
9386 
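  // Usage sketch (illustrative, not part of the generated header): the color aspect of mip
  // level 0, first array layer only, as used by the copy and blit regions below.
  inline VULKAN_HPP_NAMESPACE::ImageSubresourceLayers exampleColorLayer0()
  {
    return VULKAN_HPP_NAMESPACE::ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 );
  }
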
9387  struct ImageBlit2
9388  {
9389  using NativeType = VkImageBlit2;
9390 
9391  static const bool allowDuplicate = false;
9392  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
9393 
9394 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9395  VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
9396  std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
9397  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
9398  std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {},
9399  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9400  : pNext( pNext_ )
9401  , srcSubresource( srcSubresource_ )
9402  , srcOffsets( srcOffsets_ )
9403  , dstSubresource( dstSubresource_ )
9404  , dstOffsets( dstOffsets_ )
9405  {
9406  }
9407 
9409 
9410  ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) ) {}
9411 
9412  ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9413 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9414 
9416  {
9417  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2 const *>( &rhs );
9418  return *this;
9419  }
9420 
9421 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9423  {
9424  pNext = pNext_;
9425  return *this;
9426  }
9427 
9429  {
9430  srcSubresource = srcSubresource_;
9431  return *this;
9432  }
9433 
9434  VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
9435  {
9436  srcOffsets = srcOffsets_;
9437  return *this;
9438  }
9439 
9441  {
9442  dstSubresource = dstSubresource_;
9443  return *this;
9444  }
9445 
9446  VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
9447  {
9448  dstOffsets = dstOffsets_;
9449  return *this;
9450  }
9451 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9452 
9453  operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
9454  {
9455  return *reinterpret_cast<const VkImageBlit2 *>( this );
9456  }
9457 
9459  {
9460  return *reinterpret_cast<VkImageBlit2 *>( this );
9461  }
9462 
9463 #if defined( VULKAN_HPP_USE_REFLECT )
9464 # if 14 <= VULKAN_HPP_CPP_VERSION
9465  auto
9466 # else
9467  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
9468  const void * const &,
9469  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
9470  VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &,
9471  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
9472  VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
9473 # endif
9474  reflect() const VULKAN_HPP_NOEXCEPT
9475  {
9476  return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
9477  }
9478 #endif
9479 
9480 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9481  auto operator<=>( ImageBlit2 const & ) const = default;
9482 #else
9483  bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
9484  {
9485 # if defined( VULKAN_HPP_USE_REFLECT )
9486  return this->reflect() == rhs.reflect();
9487 # else
9488  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
9489  ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
9490 # endif
9491  }
9492 
9493  bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
9494  {
9495  return !operator==( rhs );
9496  }
9497 #endif
9498 
9499  public:
9500  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2;
9501  const void * pNext = {};
9502  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
9503  VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
9504  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
9505  VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
9506  };
9507 
9508  template <>
9509  struct CppType<StructureType, StructureType::eImageBlit2>
9510  {
9511  using Type = ImageBlit2;
9512  };
9513  using ImageBlit2KHR = ImageBlit2;
9514 
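  // Usage sketch (illustrative, not part of the generated header): an ImageBlit2 region mapping
  // the whole of mip 0 of the source onto the whole of mip 0 of the destination. The second
  // offset of each pair is the exclusive upper corner; the extents are placeholders.
  inline VULKAN_HPP_NAMESPACE::ImageBlit2 exampleFullImageBlitRegion( VULKAN_HPP_NAMESPACE::Extent3D const & srcExtent,
                                                                      VULKAN_HPP_NAMESPACE::Extent3D const & dstExtent )
  {
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers layers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 );
    VULKAN_HPP_NAMESPACE::Offset3D srcEnd( static_cast<int32_t>( srcExtent.width ), static_cast<int32_t>( srcExtent.height ), static_cast<int32_t>( srcExtent.depth ) );
    VULKAN_HPP_NAMESPACE::Offset3D dstEnd( static_cast<int32_t>( dstExtent.width ), static_cast<int32_t>( dstExtent.height ), static_cast<int32_t>( dstExtent.depth ) );
    return VULKAN_HPP_NAMESPACE::ImageBlit2( layers, { VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ), srcEnd }, layers, { VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ), dstEnd } );
  }
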
9515  struct BlitImageInfo2
9516  {
9517  using NativeType = VkBlitImageInfo2;
9518 
9519  static const bool allowDuplicate = false;
9520  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;
9521 
9522 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9523  VULKAN_HPP_CONSTEXPR BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
9524  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
9525  VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
9526  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
9527  uint32_t regionCount_ = {},
9528  const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {},
9529  VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
9530  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9531  : pNext( pNext_ )
9532  , srcImage( srcImage_ )
9533  , srcImageLayout( srcImageLayout_ )
9534  , dstImage( dstImage_ )
9535  , dstImageLayout( dstImageLayout_ )
9536  , regionCount( regionCount_ )
9537  , pRegions( pRegions_ )
9538  , filter( filter_ )
9539  {
9540  }
9541 
9543 
9544  BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) ) {}
9545 
9546 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
9547  BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
9548  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
9549  VULKAN_HPP_NAMESPACE::Image dstImage_,
9550  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
9551  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_,
9552  VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
9553  const void * pNext_ = nullptr )
9554  : pNext( pNext_ )
9555  , srcImage( srcImage_ )
9556  , srcImageLayout( srcImageLayout_ )
9557  , dstImage( dstImage_ )
9558  , dstImageLayout( dstImageLayout_ )
9559  , regionCount( static_cast<uint32_t>( regions_.size() ) )
9560  , pRegions( regions_.data() )
9561  , filter( filter_ )
9562  {
9563  }
9564 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
9565 
9566  BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9567 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9568 
9570  {
9571  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2 const *>( &rhs );
9572  return *this;
9573  }
9574 
9575 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9577  {
9578  pNext = pNext_;
9579  return *this;
9580  }
9581 
9583  {
9584  srcImage = srcImage_;
9585  return *this;
9586  }
9587 
9589  {
9590  srcImageLayout = srcImageLayout_;
9591  return *this;
9592  }
9593 
9595  {
9596  dstImage = dstImage_;
9597  return *this;
9598  }
9599 
9601  {
9602  dstImageLayout = dstImageLayout_;
9603  return *this;
9604  }
9605 
9607  {
9608  regionCount = regionCount_;
9609  return *this;
9610  }
9611 
9613  {
9614  pRegions = pRegions_;
9615  return *this;
9616  }
9617 
9618 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
9620  {
9621  regionCount = static_cast<uint32_t>( regions_.size() );
9622  pRegions = regions_.data();
9623  return *this;
9624  }
9625 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
9626 
9628  {
9629  filter = filter_;
9630  return *this;
9631  }
9632 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9633 
9634  operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
9635  {
9636  return *reinterpret_cast<const VkBlitImageInfo2 *>( this );
9637  }
9638 
9640  {
9641  return *reinterpret_cast<VkBlitImageInfo2 *>( this );
9642  }
9643 
9644 #if defined( VULKAN_HPP_USE_REFLECT )
9645 # if 14 <= VULKAN_HPP_CPP_VERSION
9646  auto
9647 # else
9648  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
9649  const void * const &,
9650  VULKAN_HPP_NAMESPACE::Image const &,
9651  VULKAN_HPP_NAMESPACE::ImageLayout const &,
9652  VULKAN_HPP_NAMESPACE::Image const &,
9653  VULKAN_HPP_NAMESPACE::ImageLayout const &,
9654  uint32_t const &,
9655  const VULKAN_HPP_NAMESPACE::ImageBlit2 * const &,
9656  VULKAN_HPP_NAMESPACE::Filter const &>
9657 # endif
9658  reflect() const VULKAN_HPP_NOEXCEPT
9659  {
9660  return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
9661  }
9662 #endif
9663 
9664 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9665  auto operator<=>( BlitImageInfo2 const & ) const = default;
9666 #else
9667  bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
9668  {
9669 # if defined( VULKAN_HPP_USE_REFLECT )
9670  return this->reflect() == rhs.reflect();
9671 # else
9672  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
9673  ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) &&
9674  ( filter == rhs.filter );
9675 # endif
9676  }
9677 
9678  bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
9679  {
9680  return !operator==( rhs );
9681  }
9682 #endif
9683 
9684  public:
9685  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2;
9686  const void * pNext = {};
9687  VULKAN_HPP_NAMESPACE::Image srcImage = {};
9688  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
9689  VULKAN_HPP_NAMESPACE::Image dstImage = {};
9690  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
9691  uint32_t regionCount = {};
9692  const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
9693  VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
9694  };
9695 
9696  template <>
9697  struct CppType<StructureType, StructureType::eBlitImageInfo2>
9698  {
9699  using Type = BlitImageInfo2;
9700  };
9701  using BlitImageInfo2KHR = BlitImageInfo2;
9702 
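  // Usage sketch (illustrative, not part of the generated header): recording a single-region
  // blit with the enhanced-mode constructor above. `cmd`, `srcImage` and `dstImage` are
  // placeholder handles; both images must already be in the transfer layouts given here.
  inline void exampleRecordBlit( VULKAN_HPP_NAMESPACE::CommandBuffer      cmd,
                                 VULKAN_HPP_NAMESPACE::Image              srcImage,
                                 VULKAN_HPP_NAMESPACE::Image              dstImage,
                                 VULKAN_HPP_NAMESPACE::ImageBlit2 const & region )
  {
    VULKAN_HPP_NAMESPACE::BlitImageInfo2 blitInfo( srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
                                                   dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
                                                   region,
                                                   VULKAN_HPP_NAMESPACE::Filter::eLinear );
    cmd.blitImage2( blitInfo );
  }
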
9703  struct BufferCaptureDescriptorDataInfoEXT
9704  {
9705  using NativeType = VkBufferCaptureDescriptorDataInfoEXT;
9706 
9707  static const bool allowDuplicate = false;
9708  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
9709 
9710 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9711  VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9712  : pNext( pNext_ )
9713  , buffer( buffer_ )
9714  {
9715  }
9716 
9718 
9720  : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast<BufferCaptureDescriptorDataInfoEXT const *>( &rhs ) )
9721  {
9722  }
9723 
9725 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9726 
9728  {
9729  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const *>( &rhs );
9730  return *this;
9731  }
9732 
9733 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9735  {
9736  pNext = pNext_;
9737  return *this;
9738  }
9739 
9741  {
9742  buffer = buffer_;
9743  return *this;
9744  }
9745 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9746 
9748  {
9749  return *reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( this );
9750  }
9751 
9753  {
9754  return *reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT *>( this );
9755  }
9756 
9757 #if defined( VULKAN_HPP_USE_REFLECT )
9758 # if 14 <= VULKAN_HPP_CPP_VERSION
9759  auto
9760 # else
9761  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
9762 # endif
9763  reflect() const VULKAN_HPP_NOEXCEPT
9764  {
9765  return std::tie( sType, pNext, buffer );
9766  }
9767 #endif
9768 
9769 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9770  auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default;
9771 #else
9772  bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
9773  {
9774 # if defined( VULKAN_HPP_USE_REFLECT )
9775  return this->reflect() == rhs.reflect();
9776 # else
9777  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
9778 # endif
9779  }
9780 
9781  bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
9782  {
9783  return !operator==( rhs );
9784  }
9785 #endif
9786 
9787  public:
9788  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
9789  const void * pNext = {};
9790  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
9791  };
9792 
9793  template <>
9794  struct CppType<StructureType, StructureType::eBufferCaptureDescriptorDataInfoEXT>
9795  {
9796  using Type = BufferCaptureDescriptorDataInfoEXT;
9797  };
9798 
9799 #if defined( VK_USE_PLATFORM_FUCHSIA )
9800  struct BufferCollectionBufferCreateInfoFUCHSIA
9801  {
9802  using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;
9803 
9804  static const bool allowDuplicate = false;
9805  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
9806 
9807 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9808  VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
9809  uint32_t index_ = {},
9810  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9811  : pNext( pNext_ )
9812  , collection( collection_ )
9813  , index( index_ )
9814  {
9815  }
9816 
9817  VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9818 
9819  BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
9820  : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
9821  {
9822  }
9823 
9824  BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9825 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9826 
9827  BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
9828  {
9829  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
9830  return *this;
9831  }
9832 
9833 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9834  VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
9835  {
9836  pNext = pNext_;
9837  return *this;
9838  }
9839 
9840  VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA &
9841  setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
9842  {
9843  collection = collection_;
9844  return *this;
9845  }
9846 
9847  VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
9848  {
9849  index = index_;
9850  return *this;
9851  }
9852 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9853 
9855  {
9856  return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
9857  }
9858 
9860  {
9861  return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
9862  }
9863 
9864 # if defined( VULKAN_HPP_USE_REFLECT )
9865 # if 14 <= VULKAN_HPP_CPP_VERSION
9866  auto
9867 # else
9868  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
9869 # endif
9870  reflect() const VULKAN_HPP_NOEXCEPT
9871  {
9872  return std::tie( sType, pNext, collection, index );
9873  }
9874 # endif
9875 
9876 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
9877  auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default;
9878 # else
9879  bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
9880  {
9881 # if defined( VULKAN_HPP_USE_REFLECT )
9882  return this->reflect() == rhs.reflect();
9883 # else
9884  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
9885 # endif
9886  }
9887 
9888  bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
9889  {
9890  return !operator==( rhs );
9891  }
9892 # endif
9893 
9894  public:
9895  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
9896  const void * pNext = {};
9897  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
9898  uint32_t index = {};
9899  };
9900 
9901  template <>
9902  struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
9903  {
9904  using Type = BufferCollectionBufferCreateInfoFUCHSIA;
9905  };
9906 #endif /*VK_USE_PLATFORM_FUCHSIA*/
9907 
9908 #if defined( VK_USE_PLATFORM_FUCHSIA )
9909  struct BufferCollectionConstraintsInfoFUCHSIA
9910  {
9911  using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;
9912 
9913  static const bool allowDuplicate = false;
9914  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
9915 
9916 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
9917  VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {},
9918  uint32_t maxBufferCount_ = {},
9919  uint32_t minBufferCountForCamping_ = {},
9920  uint32_t minBufferCountForDedicatedSlack_ = {},
9921  uint32_t minBufferCountForSharedSlack_ = {},
9922  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
9923  : pNext( pNext_ )
9924  , minBufferCount( minBufferCount_ )
9925  , maxBufferCount( maxBufferCount_ )
9926  , minBufferCountForCamping( minBufferCountForCamping_ )
9927  , minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ )
9928  , minBufferCountForSharedSlack( minBufferCountForSharedSlack_ )
9929  {
9930  }
9931 
9932  VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9933 
9934  BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
9935  : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
9936  {
9937  }
9938 
9939  BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
9940 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
9941 
9942  BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
9943  {
9944  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
9945  return *this;
9946  }
9947 
9948 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
9949  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
9950  {
9951  pNext = pNext_;
9952  return *this;
9953  }
9954 
9955  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
9956  {
9957  minBufferCount = minBufferCount_;
9958  return *this;
9959  }
9960 
9961  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
9962  {
9963  maxBufferCount = maxBufferCount_;
9964  return *this;
9965  }
9966 
9967  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
9968  {
9969  minBufferCountForCamping = minBufferCountForCamping_;
9970  return *this;
9971  }
9972 
9973  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
9974  setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
9975  {
9976  minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
9977  return *this;
9978  }
9979 
9980  VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
9981  setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
9982  {
9983  minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
9984  return *this;
9985  }
9986 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
9987 
9989  {
9990  return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
9991  }
9992 
9994  {
9995  return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
9996  }
9997 
9998 # if defined( VULKAN_HPP_USE_REFLECT )
9999 # if 14 <= VULKAN_HPP_CPP_VERSION
10000  auto
10001 # else
10002  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
10003  const void * const &,
10004  uint32_t const &,
10005  uint32_t const &,
10006  uint32_t const &,
10007  uint32_t const &,
10008  uint32_t const &>
10009 # endif
10010  reflect() const VULKAN_HPP_NOEXCEPT
10011  {
10012  return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
10013  }
10014 # endif
10015 
10016 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10017  auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default;
10018 # else
10019  bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10020  {
10021 # if defined( VULKAN_HPP_USE_REFLECT )
10022  return this->reflect() == rhs.reflect();
10023 # else
10024  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) &&
10025  ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) &&
10026  ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
10027 # endif
10028  }
10029 
10030  bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10031  {
10032  return !operator==( rhs );
10033  }
10034 # endif
10035 
10036  public:
10037  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
10038  const void * pNext = {};
10039  uint32_t minBufferCount = {};
10040  uint32_t maxBufferCount = {};
10041  uint32_t minBufferCountForCamping = {};
10042  uint32_t minBufferCountForDedicatedSlack = {};
10043  uint32_t minBufferCountForSharedSlack = {};
10044  };
10045 
10046  template <>
10047  struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
10048  {
10049  using Type = BufferCollectionConstraintsInfoFUCHSIA;
10050  };
10051 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10052 
10053 #if defined( VK_USE_PLATFORM_FUCHSIA )
10054  struct BufferCollectionCreateInfoFUCHSIA
10055  {
10056  using NativeType = VkBufferCollectionCreateInfoFUCHSIA;
10057 
10058  static const bool allowDuplicate = false;
10059  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
10060 
10061 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10062  VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10063  : pNext( pNext_ )
10064  , collectionToken( collectionToken_ )
10065  {
10066  }
10067 
10068  VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10069 
10070  BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10071  : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
10072  {
10073  }
10074 
10075  BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10076 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10077 
10078  BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10079  {
10080  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const *>( &rhs );
10081  return *this;
10082  }
10083 
10084 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10085  VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
10086  {
10087  pNext = pNext_;
10088  return *this;
10089  }
10090 
10091  VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
10092  {
10093  collectionToken = collectionToken_;
10094  return *this;
10095  }
10096 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10097 
10099  {
10100  return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( this );
10101  }
10102 
10104  {
10105  return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA *>( this );
10106  }
10107 
10108 # if defined( VULKAN_HPP_USE_REFLECT )
10109 # if 14 <= VULKAN_HPP_CPP_VERSION
10110  auto
10111 # else
10112  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, zx_handle_t const &>
10113 # endif
10114  reflect() const VULKAN_HPP_NOEXCEPT
10115  {
10116  return std::tie( sType, pNext, collectionToken );
10117  }
10118 # endif
10119 
10120 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10121  std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10122  {
10123  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
10124  return cmp;
10125  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
10126  return cmp;
10127  if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 )
10128  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
10129 
10130  return std::strong_ordering::equivalent;
10131  }
10132 # endif
10133 
10134  bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10135  {
10136  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
10137  }
10138 
10139  bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10140  {
10141  return !operator==( rhs );
10142  }
10143 
10144  public:
10145  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
10146  const void * pNext = {};
10147  zx_handle_t collectionToken = {};
10148  };
10149 
10150  template <>
10151  struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
10152  {
10153  using Type = BufferCollectionCreateInfoFUCHSIA;
10154  };
10155 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10156 
10157 #if defined( VK_USE_PLATFORM_FUCHSIA )
10158  struct BufferCollectionImageCreateInfoFUCHSIA
10159  {
10160  using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;
10161 
10162  static const bool allowDuplicate = false;
10163  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
10164 
10165 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10166  VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
10167  uint32_t index_ = {},
10168  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10169  : pNext( pNext_ )
10170  , collection( collection_ )
10171  , index( index_ )
10172  {
10173  }
10174 
10175  VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10176 
10177  BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10178  : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
10179  {
10180  }
10181 
10182  BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10183 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10184 
10185  BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10186  {
10187  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
10188  return *this;
10189  }
10190 
10191 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10192  VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
10193  {
10194  pNext = pNext_;
10195  return *this;
10196  }
10197 
10198  VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA &
10199  setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
10200  {
10201  collection = collection_;
10202  return *this;
10203  }
10204 
10205  VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
10206  {
10207  index = index_;
10208  return *this;
10209  }
10210 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10211 
10213  {
10214  return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
10215  }
10216 
10218  {
10219  return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
10220  }
10221 
10222 # if defined( VULKAN_HPP_USE_REFLECT )
10223 # if 14 <= VULKAN_HPP_CPP_VERSION
10224  auto
10225 # else
10226  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
10227 # endif
10228  reflect() const VULKAN_HPP_NOEXCEPT
10229  {
10230  return std::tie( sType, pNext, collection, index );
10231  }
10232 # endif
10233 
10234 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10235  auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default;
10236 # else
10237  bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10238  {
10239 # if defined( VULKAN_HPP_USE_REFLECT )
10240  return this->reflect() == rhs.reflect();
10241 # else
10242  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
10243 # endif
10244  }
10245 
10246  bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10247  {
10248  return !operator==( rhs );
10249  }
10250 # endif
10251 
10252  public:
10253  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
10254  const void * pNext = {};
10255  VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
10256  uint32_t index = {};
10257  };
10258 
10259  template <>
10260  struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
10261  {
10262  using Type = BufferCollectionImageCreateInfoFUCHSIA;
10263  };
10264 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10265 
10266 #if defined( VK_USE_PLATFORM_FUCHSIA )
10267  struct SysmemColorSpaceFUCHSIA
10268  {
10269  using NativeType = VkSysmemColorSpaceFUCHSIA;
10270 
10271  static const bool allowDuplicate = false;
10272  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA;
10273 
10274 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10275  VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10276  : pNext( pNext_ )
10277  , colorSpace( colorSpace_ )
10278  {
10279  }
10280 
10281  VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10282 
10283  SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10284  : SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
10285  {
10286  }
10287 
10288  SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10289 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10290 
10291  SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10292  {
10293  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const *>( &rhs );
10294  return *this;
10295  }
10296 
10297 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10298  VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
10299  {
10300  pNext = pNext_;
10301  return *this;
10302  }
10303 
10304  VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT
10305  {
10306  colorSpace = colorSpace_;
10307  return *this;
10308  }
10309 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10310 
10311  operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
10312  {
10313  return *reinterpret_cast<const VkSysmemColorSpaceFUCHSIA *>( this );
10314  }
10315 
10317  {
10318  return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA *>( this );
10319  }
10320 
10321 # if defined( VULKAN_HPP_USE_REFLECT )
10322 # if 14 <= VULKAN_HPP_CPP_VERSION
10323  auto
10324 # else
10325  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
10326 # endif
10327  reflect() const VULKAN_HPP_NOEXCEPT
10328  {
10329  return std::tie( sType, pNext, colorSpace );
10330  }
10331 # endif
10332 
10333 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10334  auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default;
10335 # else
10336  bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10337  {
10338 # if defined( VULKAN_HPP_USE_REFLECT )
10339  return this->reflect() == rhs.reflect();
10340 # else
10341  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace );
10342 # endif
10343  }
10344 
10345  bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10346  {
10347  return !operator==( rhs );
10348  }
10349 # endif
10350 
10351  public:
10352  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA;
10353  const void * pNext = {};
10354  uint32_t colorSpace = {};
10355  };
10356 
10357  template <>
10358  struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
10359  {
10360  using Type = SysmemColorSpaceFUCHSIA;
10361  };
10362 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10363 
10364 #if defined( VK_USE_PLATFORM_FUCHSIA )
10365  struct BufferCollectionPropertiesFUCHSIA
10366  {
10367  using NativeType = VkBufferCollectionPropertiesFUCHSIA;
10368 
10369  static const bool allowDuplicate = false;
10370  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA;
10371 
10372 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10373  VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(
10374  uint32_t memoryTypeBits_ = {},
10375  uint32_t bufferCount_ = {},
10376  uint32_t createInfoIndex_ = {},
10377  uint64_t sysmemPixelFormat_ = {},
10378  VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
10379  VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {},
10380  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
10381  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
10382  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
10383  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
10384  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
10385  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10386  : pNext( pNext_ )
10387  , memoryTypeBits( memoryTypeBits_ )
10388  , bufferCount( bufferCount_ )
10389  , createInfoIndex( createInfoIndex_ )
10390  , sysmemPixelFormat( sysmemPixelFormat_ )
10391  , formatFeatures( formatFeatures_ )
10392  , sysmemColorSpaceIndex( sysmemColorSpaceIndex_ )
10393  , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
10394  , suggestedYcbcrModel( suggestedYcbcrModel_ )
10395  , suggestedYcbcrRange( suggestedYcbcrRange_ )
10396  , suggestedXChromaOffset( suggestedXChromaOffset_ )
10397  , suggestedYChromaOffset( suggestedYChromaOffset_ )
10398  {
10399  }
10400 
10401  VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10402 
10403  BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10404  : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
10405  {
10406  }
10407 
10408  BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10409 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10410 
10411  BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10412  {
10413  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const *>( &rhs );
10414  return *this;
10415  }
10416 
10417 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10418  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
10419  {
10420  pNext = pNext_;
10421  return *this;
10422  }
10423 
10424  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
10425  {
10426  memoryTypeBits = memoryTypeBits_;
10427  return *this;
10428  }
10429 
10430  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
10431  {
10432  bufferCount = bufferCount_;
10433  return *this;
10434  }
10435 
10436  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT
10437  {
10438  createInfoIndex = createInfoIndex_;
10439  return *this;
10440  }
10441 
10442  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
10443  {
10444  sysmemPixelFormat = sysmemPixelFormat_;
10445  return *this;
10446  }
10447 
10448  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10449  setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT
10450  {
10451  formatFeatures = formatFeatures_;
10452  return *this;
10453  }
10454 
10455  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10456  setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT
10457  {
10458  sysmemColorSpaceIndex = sysmemColorSpaceIndex_;
10459  return *this;
10460  }
10461 
10462  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10463  setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT
10464  {
10465  samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_;
10466  return *this;
10467  }
10468 
10469  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10470  setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT
10471  {
10472  suggestedYcbcrModel = suggestedYcbcrModel_;
10473  return *this;
10474  }
10475 
10476  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10477  setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT
10478  {
10479  suggestedYcbcrRange = suggestedYcbcrRange_;
10480  return *this;
10481  }
10482 
10483  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10484  setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT
10485  {
10486  suggestedXChromaOffset = suggestedXChromaOffset_;
10487  return *this;
10488  }
10489 
10490  VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
10491  setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT
10492  {
10493  suggestedYChromaOffset = suggestedYChromaOffset_;
10494  return *this;
10495  }
10496 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10497 
10499  {
10500  return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA *>( this );
10501  }
10502 
10504  {
10505  return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( this );
10506  }
10507 
10508 # if defined( VULKAN_HPP_USE_REFLECT )
10509 # if 14 <= VULKAN_HPP_CPP_VERSION
10510  auto
10511 # else
10512  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
10513  void * const &,
10514  uint32_t const &,
10515  uint32_t const &,
10516  uint32_t const &,
10517  uint64_t const &,
10518  VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
10519  VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const &,
10520  VULKAN_HPP_NAMESPACE::ComponentMapping const &,
10521  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &,
10522  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &,
10523  VULKAN_HPP_NAMESPACE::ChromaLocation const &,
10524  VULKAN_HPP_NAMESPACE::ChromaLocation const &>
10525 # endif
10526  reflect() const VULKAN_HPP_NOEXCEPT
10527  {
10528  return std::tie( sType,
10529  pNext,
10530  memoryTypeBits,
10531  bufferCount,
10532  createInfoIndex,
10533  sysmemPixelFormat,
10534  formatFeatures,
10535  sysmemColorSpaceIndex,
10536  samplerYcbcrConversionComponents,
10537  suggestedYcbcrModel,
10538  suggestedYcbcrRange,
10539  suggestedXChromaOffset,
10540  suggestedYChromaOffset );
10541  }
10542 # endif
10543 
10544 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10545  auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default;
10546 # else
10547  bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10548  {
10549 # if defined( VULKAN_HPP_USE_REFLECT )
10550  return this->reflect() == rhs.reflect();
10551 # else
10552  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) &&
10553  ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
10554  ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
10555  ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
10556  ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
10557 # endif
10558  }
10559 
10560  bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10561  {
10562  return !operator==( rhs );
10563  }
10564 # endif
10565 
10566  public:
10567  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
10568  void * pNext = {};
10569  uint32_t memoryTypeBits = {};
10570  uint32_t bufferCount = {};
10571  uint32_t createInfoIndex = {};
10572  uint64_t sysmemPixelFormat = {};
10573  VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
10574  VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {};
10575  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
10576  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
10577  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
10578  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
10579  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
10580  };
10581 
10582  template <>
10583  struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
10584  {
10585  using Type = BufferCollectionPropertiesFUCHSIA;
10586  };
10587 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10588 
10589  struct BufferCreateInfo
10590  {
10591  using NativeType = VkBufferCreateInfo;
10592 
10593  static const bool allowDuplicate = false;
10594  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
10595 
10596 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10597  VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
10598  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
10599  VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
10600  VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
10601  uint32_t queueFamilyIndexCount_ = {},
10602  const uint32_t * pQueueFamilyIndices_ = {},
10603  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10604  : pNext( pNext_ )
10605  , flags( flags_ )
10606  , size( size_ )
10607  , usage( usage_ )
10608  , sharingMode( sharingMode_ )
10609  , queueFamilyIndexCount( queueFamilyIndexCount_ )
10610  , pQueueFamilyIndices( pQueueFamilyIndices_ )
10611  {
10612  }
10613 
10615 
10616  BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) ) {}
10617 
10618 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
10619  BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_,
10620  VULKAN_HPP_NAMESPACE::DeviceSize size_,
10621  VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_,
10622  VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
10623  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
10624  const void * pNext_ = nullptr )
10625  : pNext( pNext_ )
10626  , flags( flags_ )
10627  , size( size_ )
10628  , usage( usage_ )
10629  , sharingMode( sharingMode_ )
10630  , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
10631  , pQueueFamilyIndices( queueFamilyIndices_.data() )
10632  {
10633  }
10634 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
10635 
10637 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10638 
10640  {
10641  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
10642  return *this;
10643  }
10644 
10645 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10647  {
10648  pNext = pNext_;
10649  return *this;
10650  }
10651 
10653  {
10654  flags = flags_;
10655  return *this;
10656  }
10657 
10659  {
10660  size = size_;
10661  return *this;
10662  }
10663 
10665  {
10666  usage = usage_;
10667  return *this;
10668  }
10669 
10671  {
10672  sharingMode = sharingMode_;
10673  return *this;
10674  }
10675 
10677  {
10678  queueFamilyIndexCount = queueFamilyIndexCount_;
10679  return *this;
10680  }
10681 
10683  {
10684  pQueueFamilyIndices = pQueueFamilyIndices_;
10685  return *this;
10686  }
10687 
10688 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
10690  {
10691  queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
10692  pQueueFamilyIndices = queueFamilyIndices_.data();
10693  return *this;
10694  }
10695 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
10696 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10697 
10698  operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
10699  {
10700  return *reinterpret_cast<const VkBufferCreateInfo *>( this );
10701  }
10702 
10704  {
10705  return *reinterpret_cast<VkBufferCreateInfo *>( this );
10706  }
10707 
10708 #if defined( VULKAN_HPP_USE_REFLECT )
10709 # if 14 <= VULKAN_HPP_CPP_VERSION
10710  auto
10711 # else
10712  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
10713  const void * const &,
10718  uint32_t const &,
10719  const uint32_t * const &>
10720 # endif
10721  reflect() const VULKAN_HPP_NOEXCEPT
10722  {
10724  }
10725 #endif
10726 
10727 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10728  auto operator<=>( BufferCreateInfo const & ) const = default;
10729 #else
10731  {
10732 # if defined( VULKAN_HPP_USE_REFLECT )
10733  return this->reflect() == rhs.reflect();
10734 # else
10735  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) &&
10736  ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
10737 # endif
10738  }
10739 
10741  {
10742  return !operator==( rhs );
10743  }
10744 #endif
10745 
10746  public:
10747  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
10748  const void * pNext = {};
10749  VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
10750  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
10751  VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
10752  VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
10753  uint32_t queueFamilyIndexCount = {};
10754  const uint32_t * pQueueFamilyIndices = {};
10755  };
10756 
10757  template <>
10758  struct CppType<StructureType, StructureType::eBufferCreateInfo>
10759  {
10760  using Type = BufferCreateInfo;
10761  };
10762 
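  // Usage sketch (editorial annotation, not part of the generated registry output): BufferCreateInfo
  // is normally filled through the fluent setters above and handed to Device::createBuffer. The
  // Device handle "device" below is assumed to exist in the caller's scope, and the default
  // exception-throwing, enhanced-mode interface is assumed.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCreateInfo bufferInfo =
  //     VULKAN_HPP_NAMESPACE::BufferCreateInfo{}
  //       .setSize( 65536 )
  //       .setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eStorageBuffer )
  //       .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
  //   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( bufferInfo );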
10763 #if defined( VK_USE_PLATFORM_FUCHSIA )
10764  struct BufferConstraintsInfoFUCHSIA
10765  {
10766  using NativeType = VkBufferConstraintsInfoFUCHSIA;
10767 
10768  static const bool allowDuplicate = false;
10769  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA;
10770 
10771 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10772  VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {},
10773  VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {},
10774  VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
10775  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10776  : pNext( pNext_ )
10777  , createInfo( createInfo_ )
10778  , requiredFormatFeatures( requiredFormatFeatures_ )
10779  , bufferCollectionConstraints( bufferCollectionConstraints_ )
10780  {
10781  }
10782 
10783  VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10784 
10785  BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10786  : BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
10787  {
10788  }
10789 
10790  BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10791 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10792 
10793  BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
10794  {
10795  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const *>( &rhs );
10796  return *this;
10797  }
10798 
10799 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10800  VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
10801  {
10802  pNext = pNext_;
10803  return *this;
10804  }
10805 
10806  VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
10807  {
10808  createInfo = createInfo_;
10809  return *this;
10810  }
10811 
10812  VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
10813  setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
10814  {
10815  requiredFormatFeatures = requiredFormatFeatures_;
10816  return *this;
10817  }
10818 
10819  VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
10820  setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
10821  {
10822  bufferCollectionConstraints = bufferCollectionConstraints_;
10823  return *this;
10824  }
10825 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10826 
10827  operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
10828  {
10829  return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( this );
10830  }
10831 
10833  {
10834  return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA *>( this );
10835  }
10836 
10837 # if defined( VULKAN_HPP_USE_REFLECT )
10838 # if 14 <= VULKAN_HPP_CPP_VERSION
10839  auto
10840 # else
10841  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
10842  const void * const &,
10845  VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &>
10846 # endif
10847  reflect() const VULKAN_HPP_NOEXCEPT
10848  {
10849  return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints );
10850  }
10851 # endif
10852 
10853 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10854  auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default;
10855 # else
10856  bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10857  {
10858 # if defined( VULKAN_HPP_USE_REFLECT )
10859  return this->reflect() == rhs.reflect();
10860 # else
10861  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) &&
10862  ( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
10863 # endif
10864  }
10865 
10866  bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
10867  {
10868  return !operator==( rhs );
10869  }
10870 # endif
10871 
10872  public:
10873  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
10874  const void * pNext = {};
10876  VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
10877  VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
10878  };
10879 
10880  template <>
10881  struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
10882  {
10883  using Type = BufferConstraintsInfoFUCHSIA;
10884  };
10885 #endif /*VK_USE_PLATFORM_FUCHSIA*/
10886 
10887  struct BufferCopy
10888  {
10889  using NativeType = VkBufferCopy;
10890 
10891 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10892  VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
10893  VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
10894  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
10895  : srcOffset( srcOffset_ )
10896  , dstOffset( dstOffset_ )
10897  , size( size_ )
10898  {
10899  }
10900 
10902 
10903  BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) ) {}
10904 
10905  BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
10906 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
10907 
10909  {
10910  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
10911  return *this;
10912  }
10913 
10914 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
10916  {
10917  srcOffset = srcOffset_;
10918  return *this;
10919  }
10920 
10922  {
10923  dstOffset = dstOffset_;
10924  return *this;
10925  }
10926 
10928  {
10929  size = size_;
10930  return *this;
10931  }
10932 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
10933 
10934  operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
10935  {
10936  return *reinterpret_cast<const VkBufferCopy *>( this );
10937  }
10938 
10940  {
10941  return *reinterpret_cast<VkBufferCopy *>( this );
10942  }
10943 
10944 #if defined( VULKAN_HPP_USE_REFLECT )
10945 # if 14 <= VULKAN_HPP_CPP_VERSION
10946  auto
10947 # else
10948  std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
10949 # endif
10950  reflect() const VULKAN_HPP_NOEXCEPT
10951  {
10952  return std::tie( srcOffset, dstOffset, size );
10953  }
10954 #endif
10955 
10956 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
10957  auto operator<=>( BufferCopy const & ) const = default;
10958 #else
10959  bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
10960  {
10961 # if defined( VULKAN_HPP_USE_REFLECT )
10962  return this->reflect() == rhs.reflect();
10963 # else
10964  return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
10965 # endif
10966  }
10967 
10968  bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
10969  {
10970  return !operator==( rhs );
10971  }
10972 #endif
10973 
10974  public:
10975  VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
10976  VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
10977  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
10978  };
10979 
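  // Usage sketch (editorial annotation, not part of the generated registry output): BufferCopy
  // describes one region of a buffer-to-buffer copy and is consumed by CommandBuffer::copyBuffer.
  // The command buffer "cmd" and the Buffer handles "srcBuffer" / "dstBuffer" are assumed to exist
  // in the caller's scope.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCopy region = VULKAN_HPP_NAMESPACE::BufferCopy{}
  //                                               .setSrcOffset( 0 )
  //                                               .setDstOffset( 0 )
  //                                               .setSize( 256 );
  //   cmd.copyBuffer( srcBuffer, dstBuffer, region );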
10980  struct BufferCopy2
10981  {
10982  using NativeType = VkBufferCopy2;
10983 
10984  static const bool allowDuplicate = false;
10985  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;
10986 
10987 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
10988  VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
10989  VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
10990  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
10991  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
10992  : pNext( pNext_ )
10993  , srcOffset( srcOffset_ )
10994  , dstOffset( dstOffset_ )
10995  , size( size_ )
10996  {
10997  }
10998 
11000 
11001  BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) ) {}
11002 
11003  BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
11004 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11005 
11007  {
11008  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
11009  return *this;
11010  }
11011 
11012 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11014  {
11015  pNext = pNext_;
11016  return *this;
11017  }
11018 
11020  {
11021  srcOffset = srcOffset_;
11022  return *this;
11023  }
11024 
11026  {
11027  dstOffset = dstOffset_;
11028  return *this;
11029  }
11030 
11032  {
11033  size = size_;
11034  return *this;
11035  }
11036 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11037 
11038  operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
11039  {
11040  return *reinterpret_cast<const VkBufferCopy2 *>( this );
11041  }
11042 
11044  {
11045  return *reinterpret_cast<VkBufferCopy2 *>( this );
11046  }
11047 
11048 #if defined( VULKAN_HPP_USE_REFLECT )
11049 # if 14 <= VULKAN_HPP_CPP_VERSION
11050  auto
11051 # else
11052  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
11053  const void * const &,
11057 # endif
11058  reflect() const VULKAN_HPP_NOEXCEPT
11059  {
11060  return std::tie( sType, pNext, srcOffset, dstOffset, size );
11061  }
11062 #endif
11063 
11064 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11065  auto operator<=>( BufferCopy2 const & ) const = default;
11066 #else
11067  bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
11068  {
11069 # if defined( VULKAN_HPP_USE_REFLECT )
11070  return this->reflect() == rhs.reflect();
11071 # else
11072  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
11073 # endif
11074  }
11075 
11076  bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
11077  {
11078  return !operator==( rhs );
11079  }
11080 #endif
11081 
11082  public:
11083  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
11084  const void * pNext = {};
11085  VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
11086  VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
11087  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
11088  };
11089 
11090  template <>
11092  {
11094  };
11095  using BufferCopy2KHR = BufferCopy2;
11096 
11097  struct BufferDeviceAddressCreateInfoEXT
11098  {
11099  using NativeType = VkBufferDeviceAddressCreateInfoEXT;
11100 
11101  static const bool allowDuplicate = false;
11102  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
11103 
11104 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11105  VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
11106  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11107  : pNext( pNext_ )
11108  , deviceAddress( deviceAddress_ )
11109  {
11110  }
11111 
11113 
11115  : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
11116  {
11117  }
11118 
11120 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11121 
11123  {
11124  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
11125  return *this;
11126  }
11127 
11128 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11130  {
11131  pNext = pNext_;
11132  return *this;
11133  }
11134 
11136  {
11137  deviceAddress = deviceAddress_;
11138  return *this;
11139  }
11140 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11141 
11143  {
11144  return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>( this );
11145  }
11146 
11148  {
11149  return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>( this );
11150  }
11151 
11152 #if defined( VULKAN_HPP_USE_REFLECT )
11153 # if 14 <= VULKAN_HPP_CPP_VERSION
11154  auto
11155 # else
11156  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
11157 # endif
11158  reflect() const VULKAN_HPP_NOEXCEPT
11159  {
11160  return std::tie( sType, pNext, deviceAddress );
11161  }
11162 #endif
11163 
11164 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11165  auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
11166 #else
11168  {
11169 # if defined( VULKAN_HPP_USE_REFLECT )
11170  return this->reflect() == rhs.reflect();
11171 # else
11172  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress );
11173 # endif
11174  }
11175 
11177  {
11178  return !operator==( rhs );
11179  }
11180 #endif
11181 
11182  public:
11183  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
11184  const void * pNext = {};
11185  VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
11186  };
11187 
11188  template <>
11190  {
11192  };
11193 
11194  struct BufferDeviceAddressInfo
11195  {
11196  using NativeType = VkBufferDeviceAddressInfo;
11197 
11198  static const bool allowDuplicate = false;
11199  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
11200 
11201 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11202  VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11203  : pNext( pNext_ )
11204  , buffer( buffer_ )
11205  {
11206  }
11207 
11209 
11211  : BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
11212  {
11213  }
11214 
11216 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11217 
11219  {
11220  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
11221  return *this;
11222  }
11223 
11224 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11226  {
11227  pNext = pNext_;
11228  return *this;
11229  }
11230 
11232  {
11233  buffer = buffer_;
11234  return *this;
11235  }
11236 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11237 
11239  {
11240  return *reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
11241  }
11242 
11244  {
11245  return *reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
11246  }
11247 
11248 #if defined( VULKAN_HPP_USE_REFLECT )
11249 # if 14 <= VULKAN_HPP_CPP_VERSION
11250  auto
11251 # else
11252  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
11253 # endif
11254  reflect() const VULKAN_HPP_NOEXCEPT
11255  {
11256  return std::tie( sType, pNext, buffer );
11257  }
11258 #endif
11259 
11260 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11261  auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
11262 #else
11264  {
11265 # if defined( VULKAN_HPP_USE_REFLECT )
11266  return this->reflect() == rhs.reflect();
11267 # else
11268  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
11269 # endif
11270  }
11271 
11273  {
11274  return !operator==( rhs );
11275  }
11276 #endif
11277 
11278  public:
11279  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
11280  const void * pNext = {};
11281  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
11282  };
11283 
11284  template <>
11286  {
11288  };
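  // Usage sketch (editorial annotation, not part of the generated registry output): BufferDeviceAddressInfo
  // wraps the Buffer whose GPU virtual address is queried through Device::getBufferAddress
  // (core Vulkan 1.2 / VK_KHR_buffer_device_address). "device" and "buffer" are assumed to exist in
  // the caller's scope, and the bufferDeviceAddress feature is assumed to be enabled.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceAddress address =
  //     device.getBufferAddress( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo{}.setBuffer( buffer ) );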
11289  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
11290  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
11291 
11292  struct BufferImageCopy
11293  {
11294  using NativeType = VkBufferImageCopy;
11295 
11296 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11297  VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
11298  uint32_t bufferRowLength_ = {},
11299  uint32_t bufferImageHeight_ = {},
11300  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
11301  VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
11303  : bufferOffset( bufferOffset_ )
11304  , bufferRowLength( bufferRowLength_ )
11305  , bufferImageHeight( bufferImageHeight_ )
11306  , imageSubresource( imageSubresource_ )
11307  , imageOffset( imageOffset_ )
11308  , imageExtent( imageExtent_ )
11309  {
11310  }
11311 
11313 
11314  BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) ) {}
11315 
11316  BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
11317 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11318 
11320  {
11321  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
11322  return *this;
11323  }
11324 
11325 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11327  {
11328  bufferOffset = bufferOffset_;
11329  return *this;
11330  }
11331 
11333  {
11334  bufferRowLength = bufferRowLength_;
11335  return *this;
11336  }
11337 
11339  {
11340  bufferImageHeight = bufferImageHeight_;
11341  return *this;
11342  }
11343 
11345  {
11346  imageSubresource = imageSubresource_;
11347  return *this;
11348  }
11349 
11351  {
11352  imageOffset = imageOffset_;
11353  return *this;
11354  }
11355 
11357  {
11358  imageExtent = imageExtent_;
11359  return *this;
11360  }
11361 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11362 
11363  operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
11364  {
11365  return *reinterpret_cast<const VkBufferImageCopy *>( this );
11366  }
11367 
11369  {
11370  return *reinterpret_cast<VkBufferImageCopy *>( this );
11371  }
11372 
11373 #if defined( VULKAN_HPP_USE_REFLECT )
11374 # if 14 <= VULKAN_HPP_CPP_VERSION
11375  auto
11376 # else
11377  std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
11378  uint32_t const &,
11379  uint32_t const &,
11383 # endif
11384  reflect() const VULKAN_HPP_NOEXCEPT
11385  {
11387  }
11388 #endif
11389 
11390 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11391  auto operator<=>( BufferImageCopy const & ) const = default;
11392 #else
11394  {
11395 # if defined( VULKAN_HPP_USE_REFLECT )
11396  return this->reflect() == rhs.reflect();
11397 # else
11398  return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
11399  ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
11400 # endif
11401  }
11402 
11404  {
11405  return !operator==( rhs );
11406  }
11407 #endif
11408 
11409  public:
11410  VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
11411  uint32_t bufferRowLength = {};
11412  uint32_t bufferImageHeight = {};
11413  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
11414  VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
11415  VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
11416  };
11417 
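  // Usage sketch (editorial annotation, not part of the generated registry output): BufferImageCopy
  // describes one region of a buffer-to-image copy for CommandBuffer::copyBufferToImage; leaving
  // bufferRowLength and bufferImageHeight at 0 means the buffer data is tightly packed. "cmd",
  // "stagingBuffer", "image", "width", and "height" are assumed to exist in the caller's scope.
  //
  //   VULKAN_HPP_NAMESPACE::BufferImageCopy region =
  //     VULKAN_HPP_NAMESPACE::BufferImageCopy{}
  //       .setImageSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //       .setImageExtent( { width, height, 1 } );
  //   cmd.copyBufferToImage( stagingBuffer, image, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );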
11418  struct BufferImageCopy2
11419  {
11420  using NativeType = VkBufferImageCopy2;
11421 
11422  static const bool allowDuplicate = false;
11423  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2;
11424 
11425 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11426  VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
11427  uint32_t bufferRowLength_ = {},
11428  uint32_t bufferImageHeight_ = {},
11429  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
11430  VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
11431  VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {},
11432  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11433  : pNext( pNext_ )
11434  , bufferOffset( bufferOffset_ )
11435  , bufferRowLength( bufferRowLength_ )
11436  , bufferImageHeight( bufferImageHeight_ )
11437  , imageSubresource( imageSubresource_ )
11438  , imageOffset( imageOffset_ )
11439  , imageExtent( imageExtent_ )
11440  {
11441  }
11442 
11444 
11445  BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) ) {}
11446 
11448 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11449 
11451  {
11452  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2 const *>( &rhs );
11453  return *this;
11454  }
11455 
11456 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11458  {
11459  pNext = pNext_;
11460  return *this;
11461  }
11462 
11464  {
11465  bufferOffset = bufferOffset_;
11466  return *this;
11467  }
11468 
11470  {
11471  bufferRowLength = bufferRowLength_;
11472  return *this;
11473  }
11474 
11476  {
11477  bufferImageHeight = bufferImageHeight_;
11478  return *this;
11479  }
11480 
11482  {
11483  imageSubresource = imageSubresource_;
11484  return *this;
11485  }
11486 
11488  {
11489  imageOffset = imageOffset_;
11490  return *this;
11491  }
11492 
11494  {
11495  imageExtent = imageExtent_;
11496  return *this;
11497  }
11498 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11499 
11500  operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
11501  {
11502  return *reinterpret_cast<const VkBufferImageCopy2 *>( this );
11503  }
11504 
11506  {
11507  return *reinterpret_cast<VkBufferImageCopy2 *>( this );
11508  }
11509 
11510 #if defined( VULKAN_HPP_USE_REFLECT )
11511 # if 14 <= VULKAN_HPP_CPP_VERSION
11512  auto
11513 # else
11514  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
11515  const void * const &,
11517  uint32_t const &,
11518  uint32_t const &,
11522 # endif
11523  reflect() const VULKAN_HPP_NOEXCEPT
11524  {
11526  }
11527 #endif
11528 
11529 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11530  auto operator<=>( BufferImageCopy2 const & ) const = default;
11531 #else
11533  {
11534 # if defined( VULKAN_HPP_USE_REFLECT )
11535  return this->reflect() == rhs.reflect();
11536 # else
11537  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
11538  ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
11539  ( imageExtent == rhs.imageExtent );
11540 # endif
11541  }
11542 
11544  {
11545  return !operator==( rhs );
11546  }
11547 #endif
11548 
11549  public:
11550  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2;
11551  const void * pNext = {};
11552  VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
11553  uint32_t bufferRowLength = {};
11554  uint32_t bufferImageHeight = {};
11555  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
11556  VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
11557  VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
11558  };
11559 
11560  template <>
11562  {
11564  };
11565  using BufferImageCopy2KHR = BufferImageCopy2;
11566 
11567  struct BufferMemoryBarrier
11568  {
11569  using NativeType = VkBufferMemoryBarrier;
11570 
11571  static const bool allowDuplicate = false;
11572  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
11573 
11574 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11575  VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
11576  VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
11577  uint32_t srcQueueFamilyIndex_ = {},
11578  uint32_t dstQueueFamilyIndex_ = {},
11579  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
11580  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
11582  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11583  : pNext( pNext_ )
11584  , srcAccessMask( srcAccessMask_ )
11585  , dstAccessMask( dstAccessMask_ )
11586  , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
11587  , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
11588  , buffer( buffer_ )
11589  , offset( offset_ )
11590  , size( size_ )
11591  {
11592  }
11593 
11595 
11596  BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
11597  {
11598  }
11599 
11601 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11602 
11604  {
11605  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
11606  return *this;
11607  }
11608 
11609 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11611  {
11612  pNext = pNext_;
11613  return *this;
11614  }
11615 
11617  {
11618  srcAccessMask = srcAccessMask_;
11619  return *this;
11620  }
11621 
11623  {
11624  dstAccessMask = dstAccessMask_;
11625  return *this;
11626  }
11627 
11629  {
11630  srcQueueFamilyIndex = srcQueueFamilyIndex_;
11631  return *this;
11632  }
11633 
11635  {
11636  dstQueueFamilyIndex = dstQueueFamilyIndex_;
11637  return *this;
11638  }
11639 
11641  {
11642  buffer = buffer_;
11643  return *this;
11644  }
11645 
11647  {
11648  offset = offset_;
11649  return *this;
11650  }
11651 
11653  {
11654  size = size_;
11655  return *this;
11656  }
11657 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11658 
11660  {
11661  return *reinterpret_cast<const VkBufferMemoryBarrier *>( this );
11662  }
11663 
11665  {
11666  return *reinterpret_cast<VkBufferMemoryBarrier *>( this );
11667  }
11668 
11669 #if defined( VULKAN_HPP_USE_REFLECT )
11670 # if 14 <= VULKAN_HPP_CPP_VERSION
11671  auto
11672 # else
11673  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
11674  const void * const &,
11677  uint32_t const &,
11678  uint32_t const &,
11682 # endif
11683  reflect() const VULKAN_HPP_NOEXCEPT
11684  {
11686  }
11687 #endif
11688 
11689 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11690  auto operator<=>( BufferMemoryBarrier const & ) const = default;
11691 #else
11693  {
11694 # if defined( VULKAN_HPP_USE_REFLECT )
11695  return this->reflect() == rhs.reflect();
11696 # else
11697  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
11698  ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
11699  ( offset == rhs.offset ) && ( size == rhs.size );
11700 # endif
11701  }
11702 
11704  {
11705  return !operator==( rhs );
11706  }
11707 #endif
11708 
11709  public:
11710  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
11711  const void * pNext = {};
11712  VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
11713  VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
11714  uint32_t srcQueueFamilyIndex = {};
11715  uint32_t dstQueueFamilyIndex = {};
11716  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
11717  VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
11718  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
11719  };
11720 
11721  template <>
11723  {
11725  };
11726 
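  // Usage sketch (editorial annotation, not part of the generated registry output): BufferMemoryBarrier
  // makes a range of a buffer written in one pipeline stage visible to a later stage via
  // CommandBuffer::pipelineBarrier. "cmd" and "buffer" are assumed to exist in the caller's scope;
  // VK_QUEUE_FAMILY_IGNORED and VK_WHOLE_SIZE come from the C headers.
  //
  //   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier barrier =
  //     VULKAN_HPP_NAMESPACE::BufferMemoryBarrier{}
  //       .setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits::eTransferWrite )
  //       .setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits::eShaderRead )
  //       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //       .setBuffer( buffer )
  //       .setSize( VK_WHOLE_SIZE );
  //   cmd.pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTransfer,
  //                        VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eComputeShader,
  //                        {}, nullptr, barrier, nullptr );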
11727  struct BufferMemoryBarrier2
11728  {
11729  using NativeType = VkBufferMemoryBarrier2;
11730 
11731  static const bool allowDuplicate = false;
11732  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2;
11733 
11734 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11735  VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
11736  VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
11737  VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
11738  VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
11739  uint32_t srcQueueFamilyIndex_ = {},
11740  uint32_t dstQueueFamilyIndex_ = {},
11741  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
11742  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
11744  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11745  : pNext( pNext_ )
11746  , srcStageMask( srcStageMask_ )
11747  , srcAccessMask( srcAccessMask_ )
11748  , dstStageMask( dstStageMask_ )
11749  , dstAccessMask( dstAccessMask_ )
11750  , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
11751  , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
11752  , buffer( buffer_ )
11753  , offset( offset_ )
11754  , size( size_ )
11755  {
11756  }
11757 
11759 
11761  : BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
11762  {
11763  }
11764 
11766 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11767 
11769  {
11770  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const *>( &rhs );
11771  return *this;
11772  }
11773 
11774 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11776  {
11777  pNext = pNext_;
11778  return *this;
11779  }
11780 
11782  {
11783  srcStageMask = srcStageMask_;
11784  return *this;
11785  }
11786 
11788  {
11789  srcAccessMask = srcAccessMask_;
11790  return *this;
11791  }
11792 
11794  {
11795  dstStageMask = dstStageMask_;
11796  return *this;
11797  }
11798 
11800  {
11801  dstAccessMask = dstAccessMask_;
11802  return *this;
11803  }
11804 
11806  {
11807  srcQueueFamilyIndex = srcQueueFamilyIndex_;
11808  return *this;
11809  }
11810 
11812  {
11813  dstQueueFamilyIndex = dstQueueFamilyIndex_;
11814  return *this;
11815  }
11816 
11818  {
11819  buffer = buffer_;
11820  return *this;
11821  }
11822 
11824  {
11825  offset = offset_;
11826  return *this;
11827  }
11828 
11830  {
11831  size = size_;
11832  return *this;
11833  }
11834 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11835 
11837  {
11838  return *reinterpret_cast<const VkBufferMemoryBarrier2 *>( this );
11839  }
11840 
11842  {
11843  return *reinterpret_cast<VkBufferMemoryBarrier2 *>( this );
11844  }
11845 
11846 #if defined( VULKAN_HPP_USE_REFLECT )
11847 # if 14 <= VULKAN_HPP_CPP_VERSION
11848  auto
11849 # else
11850  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
11851  const void * const &,
11856  uint32_t const &,
11857  uint32_t const &,
11861 # endif
11862  reflect() const VULKAN_HPP_NOEXCEPT
11863  {
11865  }
11866 #endif
11867 
11868 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11869  auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
11870 #else
11872  {
11873 # if defined( VULKAN_HPP_USE_REFLECT )
11874  return this->reflect() == rhs.reflect();
11875 # else
11876  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
11877  ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
11878  ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size );
11879 # endif
11880  }
11881 
11883  {
11884  return !operator==( rhs );
11885  }
11886 #endif
11887 
11888  public:
11889  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2;
11890  const void * pNext = {};
11891  VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
11892  VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
11893  VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
11894  VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
11895  uint32_t srcQueueFamilyIndex = {};
11896  uint32_t dstQueueFamilyIndex = {};
11897  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
11898  VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
11899  VULKAN_HPP_NAMESPACE::DeviceSize size = {};
11900  };
11901 
11902  template <>
11904  {
11906  };
11907  using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
11908 
11909  struct BufferMemoryRequirementsInfo2
11910  {
11911  using NativeType = VkBufferMemoryRequirementsInfo2;
11912 
11913  static const bool allowDuplicate = false;
11914  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
11915 
11916 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
11917  VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
11918  : pNext( pNext_ )
11919  , buffer( buffer_ )
11920  {
11921  }
11922 
11924 
11926  : BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
11927  {
11928  }
11929 
11931 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
11932 
11934  {
11935  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
11936  return *this;
11937  }
11938 
11939 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
11941  {
11942  pNext = pNext_;
11943  return *this;
11944  }
11945 
11947  {
11948  buffer = buffer_;
11949  return *this;
11950  }
11951 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
11952 
11954  {
11955  return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
11956  }
11957 
11959  {
11960  return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
11961  }
11962 
11963 #if defined( VULKAN_HPP_USE_REFLECT )
11964 # if 14 <= VULKAN_HPP_CPP_VERSION
11965  auto
11966 # else
11967  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
11968 # endif
11969  reflect() const VULKAN_HPP_NOEXCEPT
11970  {
11971  return std::tie( sType, pNext, buffer );
11972  }
11973 #endif
11974 
11975 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
11976  auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
11977 #else
11979  {
11980 # if defined( VULKAN_HPP_USE_REFLECT )
11981  return this->reflect() == rhs.reflect();
11982 # else
11983  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
11984 # endif
11985  }
11986 
11988  {
11989  return !operator==( rhs );
11990  }
11991 #endif
11992 
11993  public:
11994  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
11995  const void * pNext = {};
11996  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
11997  };
11998 
11999  template <>
12001  {
12003  };
12004  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
12005 
12006  struct BufferOpaqueCaptureAddressCreateInfo
12007  {
12008  using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
12009 
12010  static const bool allowDuplicate = false;
12011  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
12012 
12013 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12014  VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
12015  : pNext( pNext_ )
12016  , opaqueCaptureAddress( opaqueCaptureAddress_ )
12017  {
12018  }
12019 
12021 
12023  : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
12024  {
12025  }
12026 
12028 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12029 
12031  {
12032  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
12033  return *this;
12034  }
12035 
12036 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12038  {
12039  pNext = pNext_;
12040  return *this;
12041  }
12042 
12044  {
12045  opaqueCaptureAddress = opaqueCaptureAddress_;
12046  return *this;
12047  }
12048 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12049 
12051  {
12052  return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
12053  }
12054 
12056  {
12057  return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
12058  }
12059 
12060 #if defined( VULKAN_HPP_USE_REFLECT )
12061 # if 14 <= VULKAN_HPP_CPP_VERSION
12062  auto
12063 # else
12064  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
12065 # endif
12066  reflect() const VULKAN_HPP_NOEXCEPT
12067  {
12068  return std::tie( sType, pNext, opaqueCaptureAddress );
12069  }
12070 #endif
12071 
12072 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12073  auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
12074 #else
12076  {
12077 # if defined( VULKAN_HPP_USE_REFLECT )
12078  return this->reflect() == rhs.reflect();
12079 # else
12080  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
12081 # endif
12082  }
12083 
12085  {
12086  return !operator==( rhs );
12087  }
12088 #endif
12089 
12090  public:
12091  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
12092  const void * pNext = {};
12093  uint64_t opaqueCaptureAddress = {};
12094  };
12095 
12096  template <>
12098  {
12100  };
12101  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
12102 
12103  struct BufferViewCreateInfo
12104  {
12105  using NativeType = VkBufferViewCreateInfo;
12106 
12107  static const bool allowDuplicate = false;
12108  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
12109 
12110 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12111  VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
12112  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
12113  VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
12114  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
12115  VULKAN_HPP_NAMESPACE::DeviceSize range_ = {},
12116  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
12117  : pNext( pNext_ )
12118  , flags( flags_ )
12119  , buffer( buffer_ )
12120  , format( format_ )
12121  , offset( offset_ )
12122  , range( range_ )
12123  {
12124  }
12125 
12127 
12129  : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
12130  {
12131  }
12132 
12134 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12135 
12137  {
12138  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
12139  return *this;
12140  }
12141 
12142 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12144  {
12145  pNext = pNext_;
12146  return *this;
12147  }
12148 
12150  {
12151  flags = flags_;
12152  return *this;
12153  }
12154 
12156  {
12157  buffer = buffer_;
12158  return *this;
12159  }
12160 
12162  {
12163  format = format_;
12164  return *this;
12165  }
12166 
12168  {
12169  offset = offset_;
12170  return *this;
12171  }
12172 
12174  {
12175  range = range_;
12176  return *this;
12177  }
12178 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12179 
12181  {
12182  return *reinterpret_cast<const VkBufferViewCreateInfo *>( this );
12183  }
12184 
12186  {
12187  return *reinterpret_cast<VkBufferViewCreateInfo *>( this );
12188  }
12189 
12190 #if defined( VULKAN_HPP_USE_REFLECT )
12191 # if 14 <= VULKAN_HPP_CPP_VERSION
12192  auto
12193 # else
12194  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
12195  const void * const &,
12201 # endif
12202  reflect() const VULKAN_HPP_NOEXCEPT
12203  {
12204  return std::tie( sType, pNext, flags, buffer, format, offset, range );
12205  }
12206 #endif
12207 
12208 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12209  auto operator<=>( BufferViewCreateInfo const & ) const = default;
12210 #else
12212  {
12213 # if defined( VULKAN_HPP_USE_REFLECT )
12214  return this->reflect() == rhs.reflect();
12215 # else
12216  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) &&
12217  ( offset == rhs.offset ) && ( range == rhs.range );
12218 # endif
12219  }
12220 
12222  {
12223  return !operator==( rhs );
12224  }
12225 #endif
12226 
12227  public:
12228  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
12229  const void * pNext = {};
12230  VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
12231  VULKAN_HPP_NAMESPACE::Buffer buffer = {};
12232  VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
12233  VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
12234  VULKAN_HPP_NAMESPACE::DeviceSize range = {};
12235  };
12236 
12237  template <>
12239  {
12241  };
12242 
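  // Usage sketch (editorial annotation, not part of the generated registry output): BufferViewCreateInfo
  // describes a formatted (texel-buffer) view over a Buffer and is passed to Device::createBufferView.
  // "device" and "texelBuffer" are assumed to exist in the caller's scope.
  //
  //   VULKAN_HPP_NAMESPACE::BufferViewCreateInfo viewInfo =
  //     VULKAN_HPP_NAMESPACE::BufferViewCreateInfo{}
  //       .setBuffer( texelBuffer )
  //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eR32Sfloat )
  //       .setOffset( 0 )
  //       .setRange( VK_WHOLE_SIZE );
  //   VULKAN_HPP_NAMESPACE::BufferView view = device.createBufferView( viewInfo );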
12243  struct CalibratedTimestampInfoEXT
12244  {
12245  using NativeType = VkCalibratedTimestampInfoEXT;
12246 
12247  static const bool allowDuplicate = false;
12248  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
12249 
12250 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12251  VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice,
12252  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
12253  : pNext( pNext_ )
12254  , timeDomain( timeDomain_ )
12255  {
12256  }
12257 
12259 
12261  : CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
12262  {
12263  }
12264 
12266 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12267 
12269  {
12270  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
12271  return *this;
12272  }
12273 
12274 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12276  {
12277  pNext = pNext_;
12278  return *this;
12279  }
12280 
12282  {
12283  timeDomain = timeDomain_;
12284  return *this;
12285  }
12286 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12287 
12289  {
12290  return *reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( this );
12291  }
12292 
12294  {
12295  return *reinterpret_cast<VkCalibratedTimestampInfoEXT *>( this );
12296  }
12297 
12298 #if defined( VULKAN_HPP_USE_REFLECT )
12299 # if 14 <= VULKAN_HPP_CPP_VERSION
12300  auto
12301 # else
12302  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TimeDomainEXT const &>
12303 # endif
12304  reflect() const VULKAN_HPP_NOEXCEPT
12305  {
12306  return std::tie( sType, pNext, timeDomain );
12307  }
12308 #endif
12309 
12310 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12311  auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default;
12312 #else
12314  {
12315 # if defined( VULKAN_HPP_USE_REFLECT )
12316  return this->reflect() == rhs.reflect();
12317 # else
12318  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain );
12319 # endif
12320  }
12321 
12323  {
12324  return !operator==( rhs );
12325  }
12326 #endif
12327 
12328  public:
12330  const void * pNext = {};
12332  };
12333 
12334  template <>
12335  struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
12336  {
12337  using Type = CalibratedTimestampInfoEXT;
12338  };
12339 
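 // Illustrative usage sketch (not part of the generated header): one CalibratedTimestampInfoEXT
 // per requested time domain is passed to Device::getCalibratedTimestampsEXT
 // (VK_EXT_calibrated_timestamps); this assumes the extension is enabled, its entry points are
 // resolved by the active dispatcher, and the enhanced-mode wrapper returns the timestamps
 // together with the maximum deviation.
 //
 //   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
 //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
 //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic )
 //   };
 //   auto [ timestamps, maxDeviation ] = device.getCalibratedTimestampsEXT( infos );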
12341  {
12343 
12344  static const bool allowDuplicate = false;
12346 
12347 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12349  CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
12350  : pNext( pNext_ )
12351  , stage( stage_ )
12352  , pCheckpointMarker( pCheckpointMarker_ )
12353  {
12354  }
12355 
12357 
12358  CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) ) {}
12359 
12361 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12362 
12364  {
12365  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
12366  return *this;
12367  }
12368 
12369  operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
12370  {
12371  return *reinterpret_cast<const VkCheckpointData2NV *>( this );
12372  }
12373 
12375  {
12376  return *reinterpret_cast<VkCheckpointData2NV *>( this );
12377  }
12378 
12379 #if defined( VULKAN_HPP_USE_REFLECT )
12380 # if 14 <= VULKAN_HPP_CPP_VERSION
12381  auto
12382 # else
12383  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
12384 # endif
12385  reflect() const VULKAN_HPP_NOEXCEPT
12386  {
12387  return std::tie( sType, pNext, stage, pCheckpointMarker );
12388  }
12389 #endif
12390 
12391 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12392  auto operator<=>( CheckpointData2NV const & ) const = default;
12393 #else
12395  {
12396 # if defined( VULKAN_HPP_USE_REFLECT )
12397  return this->reflect() == rhs.reflect();
12398 # else
12399  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
12400 # endif
12401  }
12402 
12404  {
12405  return !operator==( rhs );
12406  }
12407 #endif
12408 
12409  public:
12411  void * pNext = {};
12413  void * pCheckpointMarker = {};
12414  };
12415 
12416  template <>
12417  struct CppType<StructureType, StructureType::eCheckpointData2NV>
12418  {
12419  using Type = CheckpointData2NV;
12420  };
12421 
12423  {
12425 
12426  static const bool allowDuplicate = false;
12428 
12429 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12431  void * pCheckpointMarker_ = {},
12432  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
12433  : pNext( pNext_ )
12434  , stage( stage_ )
12435  , pCheckpointMarker( pCheckpointMarker_ )
12436  {
12437  }
12438 
12440 
12441  CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) ) {}
12442 
12444 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12445 
12447  {
12448  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
12449  return *this;
12450  }
12451 
12452  operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
12453  {
12454  return *reinterpret_cast<const VkCheckpointDataNV *>( this );
12455  }
12456 
12458  {
12459  return *reinterpret_cast<VkCheckpointDataNV *>( this );
12460  }
12461 
12462 #if defined( VULKAN_HPP_USE_REFLECT )
12463 # if 14 <= VULKAN_HPP_CPP_VERSION
12464  auto
12465 # else
12466  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &, void * const &>
12467 # endif
12468  reflect() const VULKAN_HPP_NOEXCEPT
12469  {
12470  return std::tie( sType, pNext, stage, pCheckpointMarker );
12471  }
12472 #endif
12473 
12474 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12475  auto operator<=>( CheckpointDataNV const & ) const = default;
12476 #else
12478  {
12479 # if defined( VULKAN_HPP_USE_REFLECT )
12480  return this->reflect() == rhs.reflect();
12481 # else
12482  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
12483 # endif
12484  }
12485 
12487  {
12488  return !operator==( rhs );
12489  }
12490 #endif
12491 
12492  public:
12494  void * pNext = {};
12496  void * pCheckpointMarker = {};
12497  };
12498 
12499  template <>
12500  struct CppType<StructureType, StructureType::eCheckpointDataNV>
12501  {
12502  using Type = CheckpointDataNV;
12503  };
12504 
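 // Illustrative usage sketch (not part of the generated header): CheckpointDataNV values are
 // read back from a queue after a VK_ERROR_DEVICE_LOST to find the last checkpoint markers that
 // executed (VK_NV_device_diagnostic_checkpoints). The `queue` handle is assumed to exist and
 // the extension to be enabled.
 //
 //   std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();
 //   for ( vk::CheckpointDataNV const & cp : checkpoints )
 //   {
 //     // cp.stage is the pipeline stage that last reached the marker stored in cp.pCheckpointMarker
 //   }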
12506  {
12508 #if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
12509 
12510  VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float, 4> & float32_ = {} ) : float32( float32_ ) {}
12511 
12512  VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 )
12513  : float32( { float32_0, float32_1, float32_2, float32_3 } )
12514  {
12515  }
12516 
12517  VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t, 4> & int32_ ) : int32( int32_ ) {}
12518 
12519  VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 ) : int32( { int32_0, int32_1, int32_2, int32_3 } )
12520  {
12521  }
12522 
12523  VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t, 4> & uint32_ ) : uint32( uint32_ ) {}
12524 
12525  VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 )
12526  : uint32( { uint32_0, uint32_1, uint32_2, uint32_3 } )
12527  {
12528  }
12529 #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
12530 
12531 #if !defined( VULKAN_HPP_NO_UNION_SETTERS )
12532  VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
12533  {
12534  float32 = float32_;
12535  return *this;
12536  }
12537 
12538  VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
12539  {
12540  int32 = int32_;
12541  return *this;
12542  }
12543 
12544  VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
12545  {
12546  uint32 = uint32_;
12547  return *this;
12548  }
12549 #endif /*VULKAN_HPP_NO_UNION_SETTERS*/
12550 
12551  operator VkClearColorValue const &() const
12552  {
12553  return *reinterpret_cast<const VkClearColorValue *>( this );
12554  }
12555 
12556  operator VkClearColorValue &()
12557  {
12558  return *reinterpret_cast<VkClearColorValue *>( this );
12559  }
12560 
12564  };
12565 
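 // Illustrative usage sketch (not part of the generated header): the union member of
 // ClearColorValue that is read depends on the numeric type of the image format. `cmd` and
 // `image` are assumed to exist, with the image in eTransferDstOptimal layout.
 //
 //   vk::ClearColorValue clearColor( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
 //   vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
 //   cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, clearColor, range );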
12567  {
12569 
12570 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12571  VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
12572  : depth( depth_ )
12573  , stencil( stencil_ )
12574  {
12575  }
12576 
12578 
12580  : ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
12581  {
12582  }
12583 
12585 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12586 
12588  {
12589  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
12590  return *this;
12591  }
12592 
12593 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12595  {
12596  depth = depth_;
12597  return *this;
12598  }
12599 
12601  {
12602  stencil = stencil_;
12603  return *this;
12604  }
12605 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12606 
12608  {
12609  return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
12610  }
12611 
12613  {
12614  return *reinterpret_cast<VkClearDepthStencilValue *>( this );
12615  }
12616 
12617 #if defined( VULKAN_HPP_USE_REFLECT )
12618 # if 14 <= VULKAN_HPP_CPP_VERSION
12619  auto
12620 # else
12621  std::tuple<float const &, uint32_t const &>
12622 # endif
12623  reflect() const VULKAN_HPP_NOEXCEPT
12624  {
12625  return std::tie( depth, stencil );
12626  }
12627 #endif
12628 
12629 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12630  auto operator<=>( ClearDepthStencilValue const & ) const = default;
12631 #else
12633  {
12634 # if defined( VULKAN_HPP_USE_REFLECT )
12635  return this->reflect() == rhs.reflect();
12636 # else
12637  return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
12638 # endif
12639  }
12640 
12642  {
12643  return !operator==( rhs );
12644  }
12645 #endif
12646 
12647  public:
12648  float depth = {};
12649  uint32_t stencil = {};
12650  };
12651 
12653  {
12655 #if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
12656 
12658 
12660 #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
12661 
12662 #if !defined( VULKAN_HPP_NO_UNION_SETTERS )
12664  {
12665  color = color_;
12666  return *this;
12667  }
12668 
12670  {
12671  depthStencil = depthStencil_;
12672  return *this;
12673  }
12674 #endif /*VULKAN_HPP_NO_UNION_SETTERS*/
12675 
12676  operator VkClearValue const &() const
12677  {
12678  return *reinterpret_cast<const VkClearValue *>( this );
12679  }
12680 
12681  operator VkClearValue &()
12682  {
12683  return *reinterpret_cast<VkClearValue *>( this );
12684  }
12685 
12686 #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
12689 #else
12692 #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
12693  };
12694 
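 // Illustrative usage sketch (not part of the generated header): a render pass takes one
 // ClearValue per attachment; color attachments read the `color` member, depth/stencil
 // attachments read `depthStencil`. The `renderPass`, `framebuffer` and `renderArea` values
 // are assumed to exist in the calling code.
 //
 //   std::array<vk::ClearValue, 2> clearValues{};
 //   clearValues[0].setColor( vk::ClearColorValue( std::array<float, 4>{ 0.2f, 0.2f, 0.2f, 1.0f } ) );
 //   clearValues[1].setDepthStencil( vk::ClearDepthStencilValue( 1.0f, 0 ) );
 //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, renderArea, clearValues );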
12696  {
12698 
12699 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12701  uint32_t colorAttachment_ = {},
12703  : aspectMask( aspectMask_ )
12704  , colorAttachment( colorAttachment_ )
12705  , clearValue( clearValue_ )
12706  {
12707  }
12708 
12710 
12711  ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) ) {}
12712 
12713  ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
12714 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12715 
12717  {
12718  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
12719  return *this;
12720  }
12721 
12722 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12724  {
12725  aspectMask = aspectMask_;
12726  return *this;
12727  }
12728 
12730  {
12731  colorAttachment = colorAttachment_;
12732  return *this;
12733  }
12734 
12736  {
12737  clearValue = clearValue_;
12738  return *this;
12739  }
12740 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12741 
12742  operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
12743  {
12744  return *reinterpret_cast<const VkClearAttachment *>( this );
12745  }
12746 
12748  {
12749  return *reinterpret_cast<VkClearAttachment *>( this );
12750  }
12751 
12752 #if defined( VULKAN_HPP_USE_REFLECT )
12753 # if 14 <= VULKAN_HPP_CPP_VERSION
12754  auto
12755 # else
12756  std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
12757 # endif
12758  reflect() const VULKAN_HPP_NOEXCEPT
12759  {
12760  return std::tie( aspectMask, colorAttachment, clearValue );
12761  }
12762 #endif
12763 
12764  public:
12766  uint32_t colorAttachment = {};
12768  };
12769 
12770  struct ClearRect
12771  {
12773 
12774 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12775  VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
12776  : rect( rect_ )
12777  , baseArrayLayer( baseArrayLayer_ )
12778  , layerCount( layerCount_ )
12779  {
12780  }
12781 
12783 
12784  ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) ) {}
12785 
12786  ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
12787 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12788 
12790  {
12791  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
12792  return *this;
12793  }
12794 
12795 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12797  {
12798  rect = rect_;
12799  return *this;
12800  }
12801 
12803  {
12804  baseArrayLayer = baseArrayLayer_;
12805  return *this;
12806  }
12807 
12809  {
12810  layerCount = layerCount_;
12811  return *this;
12812  }
12813 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12814 
12815  operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
12816  {
12817  return *reinterpret_cast<const VkClearRect *>( this );
12818  }
12819 
12821  {
12822  return *reinterpret_cast<VkClearRect *>( this );
12823  }
12824 
12825 #if defined( VULKAN_HPP_USE_REFLECT )
12826 # if 14 <= VULKAN_HPP_CPP_VERSION
12827  auto
12828 # else
12829  std::tuple<VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &>
12830 # endif
12831  reflect() const VULKAN_HPP_NOEXCEPT
12832  {
12833  return std::tie( rect, baseArrayLayer, layerCount );
12834  }
12835 #endif
12836 
12837 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12838  auto operator<=>( ClearRect const & ) const = default;
12839 #else
12840  bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
12841  {
12842 # if defined( VULKAN_HPP_USE_REFLECT )
12843  return this->reflect() == rhs.reflect();
12844 # else
12845  return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
12846 # endif
12847  }
12848 
12849  bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
12850  {
12851  return !operator==( rhs );
12852  }
12853 #endif
12854 
12855  public:
12857  uint32_t baseArrayLayer = {};
12858  uint32_t layerCount = {};
12859  };
12860 
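 // Illustrative usage sketch (not part of the generated header): ClearAttachment and ClearRect
 // drive CommandBuffer::clearAttachments inside a render pass. `cmd` is assumed to be recording
 // within a render pass whose attachment 0 is a color attachment covering `extent`.
 //
 //   vk::ClearAttachment attachment( vk::ImageAspectFlagBits::eColor,
 //                                   0,
 //                                   vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );
 //   vk::ClearRect rect( vk::Rect2D( { 0, 0 }, extent ), 0, 1 );
 //   cmd.clearAttachments( attachment, rect );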
12862  {
12864 
12865 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12866  VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
12867  : pixelX( pixelX_ )
12868  , pixelY( pixelY_ )
12869  , sample( sample_ )
12870  {
12871  }
12872 
12874 
12876  : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
12877  {
12878  }
12879 
12881 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12882 
12884  {
12885  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
12886  return *this;
12887  }
12888 
12889 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
12891  {
12892  pixelX = pixelX_;
12893  return *this;
12894  }
12895 
12897  {
12898  pixelY = pixelY_;
12899  return *this;
12900  }
12901 
12903  {
12904  sample = sample_;
12905  return *this;
12906  }
12907 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
12908 
12910  {
12911  return *reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
12912  }
12913 
12915  {
12916  return *reinterpret_cast<VkCoarseSampleLocationNV *>( this );
12917  }
12918 
12919 #if defined( VULKAN_HPP_USE_REFLECT )
12920 # if 14 <= VULKAN_HPP_CPP_VERSION
12921  auto
12922 # else
12923  std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
12924 # endif
12925  reflect() const VULKAN_HPP_NOEXCEPT
12926  {
12927  return std::tie( pixelX, pixelY, sample );
12928  }
12929 #endif
12930 
12931 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
12932  auto operator<=>( CoarseSampleLocationNV const & ) const = default;
12933 #else
12935  {
12936 # if defined( VULKAN_HPP_USE_REFLECT )
12937  return this->reflect() == rhs.reflect();
12938 # else
12939  return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample );
12940 # endif
12941  }
12942 
12944  {
12945  return !operator==( rhs );
12946  }
12947 #endif
12948 
12949  public:
12950  uint32_t pixelX = {};
12951  uint32_t pixelY = {};
12952  uint32_t sample = {};
12953  };
12954 
12956  {
12958 
12959 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
12962  uint32_t sampleCount_ = {},
12963  uint32_t sampleLocationCount_ = {},
12964  const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
12965  : shadingRate( shadingRate_ )
12966  , sampleCount( sampleCount_ )
12967  , sampleLocationCount( sampleLocationCount_ )
12968  , pSampleLocations( pSampleLocations_ )
12969  {
12970  }
12971 
12973 
12975  : CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
12976  {
12977  }
12978 
12979 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
12981  uint32_t sampleCount_,
12983  : shadingRate( shadingRate_ )
12984  , sampleCount( sampleCount_ )
12985  , sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) )
12986  , pSampleLocations( sampleLocations_.data() )
12987  {
12988  }
12989 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
12990 
12992 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
12993 
12995  {
12996  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
12997  return *this;
12998  }
12999 
13000 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13002  {
13003  shadingRate = shadingRate_;
13004  return *this;
13005  }
13006 
13008  {
13009  sampleCount = sampleCount_;
13010  return *this;
13011  }
13012 
13014  {
13015  sampleLocationCount = sampleLocationCount_;
13016  return *this;
13017  }
13018 
13021  {
13022  pSampleLocations = pSampleLocations_;
13023  return *this;
13024  }
13025 
13026 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
13029  {
13030  sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
13031  pSampleLocations = sampleLocations_.data();
13032  return *this;
13033  }
13034 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
13035 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13036 
13038  {
13039  return *reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( this );
13040  }
13041 
13043  {
13044  return *reinterpret_cast<VkCoarseSampleOrderCustomNV *>( this );
13045  }
13046 
13047 #if defined( VULKAN_HPP_USE_REFLECT )
13048 # if 14 <= VULKAN_HPP_CPP_VERSION
13049  auto
13050 # else
13052  uint32_t const &,
13053  uint32_t const &,
13055 # endif
13056  reflect() const VULKAN_HPP_NOEXCEPT
13057  {
13059  }
13060 #endif
13061 
13062 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13063  auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
13064 #else
13066  {
13067 # if defined( VULKAN_HPP_USE_REFLECT )
13068  return this->reflect() == rhs.reflect();
13069 # else
13070  return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) &&
13071  ( pSampleLocations == rhs.pSampleLocations );
13072 # endif
13073  }
13074 
13076  {
13077  return !operator==( rhs );
13078  }
13079 #endif
13080 
13081  public:
13083  uint32_t sampleCount = {};
13084  uint32_t sampleLocationCount = {};
13086  };
13087 
13089  {
13091 
13092 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13094  VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
13095  VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
13098  : advancedBlendOp( advancedBlendOp_ )
13099  , srcPremultiplied( srcPremultiplied_ )
13100  , dstPremultiplied( dstPremultiplied_ )
13101  , blendOverlap( blendOverlap_ )
13102  , clampResults( clampResults_ )
13103  {
13104  }
13105 
13107 
13109  : ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
13110  {
13111  }
13112 
13114 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13115 
13117  {
13118  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const *>( &rhs );
13119  return *this;
13120  }
13121 
13122 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13124  {
13125  advancedBlendOp = advancedBlendOp_;
13126  return *this;
13127  }
13128 
13130  {
13131  srcPremultiplied = srcPremultiplied_;
13132  return *this;
13133  }
13134 
13136  {
13137  dstPremultiplied = dstPremultiplied_;
13138  return *this;
13139  }
13140 
13142  {
13143  blendOverlap = blendOverlap_;
13144  return *this;
13145  }
13146 
13148  {
13149  clampResults = clampResults_;
13150  return *this;
13151  }
13152 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13153 
13155  {
13156  return *reinterpret_cast<const VkColorBlendAdvancedEXT *>( this );
13157  }
13158 
13160  {
13161  return *reinterpret_cast<VkColorBlendAdvancedEXT *>( this );
13162  }
13163 
13164 #if defined( VULKAN_HPP_USE_REFLECT )
13165 # if 14 <= VULKAN_HPP_CPP_VERSION
13166  auto
13167 # else
13168  std::tuple<VULKAN_HPP_NAMESPACE::BlendOp const &,
13173 # endif
13174  reflect() const VULKAN_HPP_NOEXCEPT
13175  {
13177  }
13178 #endif
13179 
13180 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13181  auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
13182 #else
13184  {
13185 # if defined( VULKAN_HPP_USE_REFLECT )
13186  return this->reflect() == rhs.reflect();
13187 # else
13188  return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
13189  ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults );
13190 # endif
13191  }
13192 
13194  {
13195  return !operator==( rhs );
13196  }
13197 #endif
13198 
13199  public:
13205  };
13206 
13208  {
13210 
13211 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13218  : srcColorBlendFactor( srcColorBlendFactor_ )
13219  , dstColorBlendFactor( dstColorBlendFactor_ )
13220  , colorBlendOp( colorBlendOp_ )
13221  , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
13222  , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
13223  , alphaBlendOp( alphaBlendOp_ )
13224  {
13225  }
13226 
13228 
13230  : ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
13231  {
13232  }
13233 
13235 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13236 
13238  {
13239  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const *>( &rhs );
13240  return *this;
13241  }
13242 
13243 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13245  {
13246  srcColorBlendFactor = srcColorBlendFactor_;
13247  return *this;
13248  }
13249 
13251  {
13252  dstColorBlendFactor = dstColorBlendFactor_;
13253  return *this;
13254  }
13255 
13257  {
13258  colorBlendOp = colorBlendOp_;
13259  return *this;
13260  }
13261 
13263  {
13264  srcAlphaBlendFactor = srcAlphaBlendFactor_;
13265  return *this;
13266  }
13267 
13269  {
13270  dstAlphaBlendFactor = dstAlphaBlendFactor_;
13271  return *this;
13272  }
13273 
13275  {
13276  alphaBlendOp = alphaBlendOp_;
13277  return *this;
13278  }
13279 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13280 
13282  {
13283  return *reinterpret_cast<const VkColorBlendEquationEXT *>( this );
13284  }
13285 
13287  {
13288  return *reinterpret_cast<VkColorBlendEquationEXT *>( this );
13289  }
13290 
13291 #if defined( VULKAN_HPP_USE_REFLECT )
13292 # if 14 <= VULKAN_HPP_CPP_VERSION
13293  auto
13294 # else
13295  std::tuple<VULKAN_HPP_NAMESPACE::BlendFactor const &,
13301 # endif
13302  reflect() const VULKAN_HPP_NOEXCEPT
13303  {
13305  }
13306 #endif
13307 
13308 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13309  auto operator<=>( ColorBlendEquationEXT const & ) const = default;
13310 #else
13312  {
13313 # if defined( VULKAN_HPP_USE_REFLECT )
13314  return this->reflect() == rhs.reflect();
13315 # else
13316  return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
13317  ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp );
13318 # endif
13319  }
13320 
13322  {
13323  return !operator==( rhs );
13324  }
13325 #endif
13326 
13327  public:
13334  };
13335 
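 // Illustrative usage sketch (not part of the generated header): with VK_EXT_extended_dynamic_state3
 // enabled, a ColorBlendEquationEXT can be set per color attachment while recording; the wrapper
 // call below assumes the extension's entry points are available through the active dispatcher.
 //
 //   vk::ColorBlendEquationEXT blendEq( vk::BlendFactor::eSrcAlpha,
 //                                      vk::BlendFactor::eOneMinusSrcAlpha,
 //                                      vk::BlendOp::eAdd,
 //                                      vk::BlendFactor::eOne,
 //                                      vk::BlendFactor::eZero,
 //                                      vk::BlendOp::eAdd );
 //   cmd.setColorBlendEquationEXT( 0, blendEq );  // firstAttachment = 0, one equation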
13337  {
13339 
13340  static const bool allowDuplicate = false;
13342 
13343 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13346  uint32_t commandBufferCount_ = {},
13347  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13348  : pNext( pNext_ )
13349  , commandPool( commandPool_ )
13350  , level( level_ )
13351  , commandBufferCount( commandBufferCount_ )
13352  {
13353  }
13354 
13356 
13358  : CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
13359  {
13360  }
13361 
13363 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13364 
13366  {
13367  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
13368  return *this;
13369  }
13370 
13371 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13373  {
13374  pNext = pNext_;
13375  return *this;
13376  }
13377 
13379  {
13380  commandPool = commandPool_;
13381  return *this;
13382  }
13383 
13385  {
13386  level = level_;
13387  return *this;
13388  }
13389 
13391  {
13392  commandBufferCount = commandBufferCount_;
13393  return *this;
13394  }
13395 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13396 
13398  {
13399  return *reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
13400  }
13401 
13403  {
13404  return *reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
13405  }
13406 
13407 #if defined( VULKAN_HPP_USE_REFLECT )
13408 # if 14 <= VULKAN_HPP_CPP_VERSION
13409  auto
13410 # else
13411  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
13412  const void * const &,
13415  uint32_t const &>
13416 # endif
13417  reflect() const VULKAN_HPP_NOEXCEPT
13418  {
13419  return std::tie( sType, pNext, commandPool, level, commandBufferCount );
13420  }
13421 #endif
13422 
13423 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13424  auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
13425 #else
13427  {
13428 # if defined( VULKAN_HPP_USE_REFLECT )
13429  return this->reflect() == rhs.reflect();
13430 # else
13431  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) &&
13432  ( commandBufferCount == rhs.commandBufferCount );
13433 # endif
13434  }
13435 
13437  {
13438  return !operator==( rhs );
13439  }
13440 #endif
13441 
13442  public:
13444  const void * pNext = {};
13447  uint32_t commandBufferCount = {};
13448  };
13449 
13450  template <>
13451  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
13452  {
13453  using Type = CommandBufferAllocateInfo;
13454  };
13455 
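 // Illustrative usage sketch (not part of the generated header): CommandBufferAllocateInfo
 // states the pool, level and count of command buffers to allocate. The `device` and
 // `commandPool` handles are assumed to exist.
 //
 //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
 //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );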
13457  {
13459 
13460  static const bool allowDuplicate = false;
13462 
13463 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13465  uint32_t subpass_ = {},
13466  VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
13467  VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
13468  VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
13469  VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {},
13470  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13471  : pNext( pNext_ )
13472  , renderPass( renderPass_ )
13473  , subpass( subpass_ )
13474  , framebuffer( framebuffer_ )
13475  , occlusionQueryEnable( occlusionQueryEnable_ )
13476  , queryFlags( queryFlags_ )
13477  , pipelineStatistics( pipelineStatistics_ )
13478  {
13479  }
13480 
13482 
13484  : CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
13485  {
13486  }
13487 
13489 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13490 
13492  {
13493  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
13494  return *this;
13495  }
13496 
13497 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13499  {
13500  pNext = pNext_;
13501  return *this;
13502  }
13503 
13505  {
13506  renderPass = renderPass_;
13507  return *this;
13508  }
13509 
13511  {
13512  subpass = subpass_;
13513  return *this;
13514  }
13515 
13517  {
13518  framebuffer = framebuffer_;
13519  return *this;
13520  }
13521 
13523  {
13524  occlusionQueryEnable = occlusionQueryEnable_;
13525  return *this;
13526  }
13527 
13529  {
13530  queryFlags = queryFlags_;
13531  return *this;
13532  }
13533 
13536  {
13537  pipelineStatistics = pipelineStatistics_;
13538  return *this;
13539  }
13540 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13541 
13543  {
13544  return *reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
13545  }
13546 
13548  {
13549  return *reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
13550  }
13551 
13552 #if defined( VULKAN_HPP_USE_REFLECT )
13553 # if 14 <= VULKAN_HPP_CPP_VERSION
13554  auto
13555 # else
13556  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
13557  const void * const &,
13559  uint32_t const &,
13564 # endif
13565  reflect() const VULKAN_HPP_NOEXCEPT
13566  {
13568  }
13569 #endif
13570 
13571 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13572  auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
13573 #else
13575  {
13576 # if defined( VULKAN_HPP_USE_REFLECT )
13577  return this->reflect() == rhs.reflect();
13578 # else
13579  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) &&
13580  ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
13581  ( pipelineStatistics == rhs.pipelineStatistics );
13582 # endif
13583  }
13584 
13586  {
13587  return !operator==( rhs );
13588  }
13589 #endif
13590 
13591  public:
13593  const void * pNext = {};
13595  uint32_t subpass = {};
13600  };
13601 
13602  template <>
13603  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
13604  {
13605  using Type = CommandBufferInheritanceInfo;
13606  };
13607 
13609  {
13611 
13612  static const bool allowDuplicate = false;
13614 
13615 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13617  const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {},
13618  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13619  : pNext( pNext_ )
13620  , flags( flags_ )
13621  , pInheritanceInfo( pInheritanceInfo_ )
13622  {
13623  }
13624 
13626 
13628  : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
13629  {
13630  }
13631 
13633 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13634 
13636  {
13637  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
13638  return *this;
13639  }
13640 
13641 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13643  {
13644  pNext = pNext_;
13645  return *this;
13646  }
13647 
13649  {
13650  flags = flags_;
13651  return *this;
13652  }
13653 
13656  {
13657  pInheritanceInfo = pInheritanceInfo_;
13658  return *this;
13659  }
13660 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13661 
13663  {
13664  return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
13665  }
13666 
13668  {
13669  return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
13670  }
13671 
13672 #if defined( VULKAN_HPP_USE_REFLECT )
13673 # if 14 <= VULKAN_HPP_CPP_VERSION
13674  auto
13675 # else
13676  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
13677  const void * const &,
13680 # endif
13681  reflect() const VULKAN_HPP_NOEXCEPT
13682  {
13683  return std::tie( sType, pNext, flags, pInheritanceInfo );
13684  }
13685 #endif
13686 
13687 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13688  auto operator<=>( CommandBufferBeginInfo const & ) const = default;
13689 #else
13691  {
13692 # if defined( VULKAN_HPP_USE_REFLECT )
13693  return this->reflect() == rhs.reflect();
13694 # else
13695  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo );
13696 # endif
13697  }
13698 
13700  {
13701  return !operator==( rhs );
13702  }
13703 #endif
13704 
13705  public:
13707  const void * pNext = {};
13710  };
13711 
13712  template <>
13713  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
13714  {
13715  using Type = CommandBufferBeginInfo;
13716  };
13717 
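 // Illustrative usage sketch (not part of the generated header): a secondary command buffer that
 // continues a render pass references a CommandBufferInheritanceInfo from its begin info. The
 // `secondaryCmd`, `renderPass` and `framebuffer` handles are assumed to exist.
 //
 //   vk::CommandBufferInheritanceInfo inheritance( renderPass, 0 /*subpass*/, framebuffer );
 //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritance );
 //   secondaryCmd.begin( beginInfo );
 //   // ... record draw commands ...
 //   secondaryCmd.end();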
13719  {
13721 
13722  static const bool allowDuplicate = false;
13724 
13725 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13727  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13728  : pNext( pNext_ )
13729  , conditionalRenderingEnable( conditionalRenderingEnable_ )
13730  {
13731  }
13732 
13735 
13737  : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
13738  {
13739  }
13740 
13743 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13744 
13746  {
13747  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
13748  return *this;
13749  }
13750 
13751 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13753  {
13754  pNext = pNext_;
13755  return *this;
13756  }
13757 
13760  {
13761  conditionalRenderingEnable = conditionalRenderingEnable_;
13762  return *this;
13763  }
13764 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13765 
13767  {
13768  return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
13769  }
13770 
13772  {
13773  return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
13774  }
13775 
13776 #if defined( VULKAN_HPP_USE_REFLECT )
13777 # if 14 <= VULKAN_HPP_CPP_VERSION
13778  auto
13779 # else
13780  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
13781 # endif
13782  reflect() const VULKAN_HPP_NOEXCEPT
13783  {
13784  return std::tie( sType, pNext, conditionalRenderingEnable );
13785  }
13786 #endif
13787 
13788 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13789  auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
13790 #else
13792  {
13793 # if defined( VULKAN_HPP_USE_REFLECT )
13794  return this->reflect() == rhs.reflect();
13795 # else
13796  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
13797 # endif
13798  }
13799 
13801  {
13802  return !operator==( rhs );
13803  }
13804 #endif
13805 
13806  public:
13808  const void * pNext = {};
13810  };
13811 
13812  template <>
13813  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
13814  {
13815  using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
13816  };
13817 
13819  {
13821 
13822  static const bool allowDuplicate = false;
13824 
13825 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13828  VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
13829  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13830  : pNext( pNext_ )
13831  , transform( transform_ )
13832  , renderArea( renderArea_ )
13833  {
13834  }
13835 
13838 
13840  : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
13841  {
13842  }
13843 
13846 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13847 
13849  {
13850  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
13851  return *this;
13852  }
13853 
13854 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13856  {
13857  pNext = pNext_;
13858  return *this;
13859  }
13860 
13863  {
13864  transform = transform_;
13865  return *this;
13866  }
13867 
13870  {
13871  renderArea = renderArea_;
13872  return *this;
13873  }
13874 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
13875 
13877  {
13878  return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
13879  }
13880 
13882  {
13883  return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
13884  }
13885 
13886 #if defined( VULKAN_HPP_USE_REFLECT )
13887 # if 14 <= VULKAN_HPP_CPP_VERSION
13888  auto
13889 # else
13890  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
13891  void * const &,
13894 # endif
13895  reflect() const VULKAN_HPP_NOEXCEPT
13896  {
13897  return std::tie( sType, pNext, transform, renderArea );
13898  }
13899 #endif
13900 
13901 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
13902  auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
13903 #else
13905  {
13906 # if defined( VULKAN_HPP_USE_REFLECT )
13907  return this->reflect() == rhs.reflect();
13908 # else
13909  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea );
13910 # endif
13911  }
13912 
13914  {
13915  return !operator==( rhs );
13916  }
13917 #endif
13918 
13919  public:
13921  void * pNext = {};
13924  };
13925 
13926  template <>
13927  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
13928  {
13929  using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
13930  };
13931 
13933  {
13935 
13936  static const bool allowDuplicate = false;
13938 
13939 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
13942  uint32_t viewMask_ = {},
13943  uint32_t colorAttachmentCount_ = {},
13944  const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
13948  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
13949  : pNext( pNext_ )
13950  , flags( flags_ )
13951  , viewMask( viewMask_ )
13952  , colorAttachmentCount( colorAttachmentCount_ )
13953  , pColorAttachmentFormats( pColorAttachmentFormats_ )
13954  , depthAttachmentFormat( depthAttachmentFormat_ )
13955  , stencilAttachmentFormat( stencilAttachmentFormat_ )
13956  , rasterizationSamples( rasterizationSamples_ )
13957  {
13958  }
13959 
13961 
13963  : CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
13964  {
13965  }
13966 
13967 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
13969  uint32_t viewMask_,
13974  const void * pNext_ = nullptr )
13975  : pNext( pNext_ )
13976  , flags( flags_ )
13977  , viewMask( viewMask_ )
13978  , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
13979  , pColorAttachmentFormats( colorAttachmentFormats_.data() )
13980  , depthAttachmentFormat( depthAttachmentFormat_ )
13981  , stencilAttachmentFormat( stencilAttachmentFormat_ )
13982  , rasterizationSamples( rasterizationSamples_ )
13983  {
13984  }
13985 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
13986 
13988 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
13989 
13991  {
13992  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
13993  return *this;
13994  }
13995 
13996 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
13998  {
13999  pNext = pNext_;
14000  return *this;
14001  }
14002 
14004  {
14005  flags = flags_;
14006  return *this;
14007  }
14008 
14010  {
14011  viewMask = viewMask_;
14012  return *this;
14013  }
14014 
14016  {
14017  colorAttachmentCount = colorAttachmentCount_;
14018  return *this;
14019  }
14020 
14023  {
14024  pColorAttachmentFormats = pColorAttachmentFormats_;
14025  return *this;
14026  }
14027 
14028 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
14031  {
14032  colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
14033  pColorAttachmentFormats = colorAttachmentFormats_.data();
14034  return *this;
14035  }
14036 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
14037 
14040  {
14041  depthAttachmentFormat = depthAttachmentFormat_;
14042  return *this;
14043  }
14044 
14047  {
14048  stencilAttachmentFormat = stencilAttachmentFormat_;
14049  return *this;
14050  }
14051 
14054  {
14055  rasterizationSamples = rasterizationSamples_;
14056  return *this;
14057  }
14058 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14059 
14061  {
14062  return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
14063  }
14064 
14066  {
14067  return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
14068  }
14069 
14070 #if defined( VULKAN_HPP_USE_REFLECT )
14071 # if 14 <= VULKAN_HPP_CPP_VERSION
14072  auto
14073 # else
14074  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
14075  const void * const &,
14077  uint32_t const &,
14078  uint32_t const &,
14079  const VULKAN_HPP_NAMESPACE::Format * const &,
14083 # endif
14084  reflect() const VULKAN_HPP_NOEXCEPT
14085  {
14086  return std::tie(
14088  }
14089 #endif
14090 
14091 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14092  auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
14093 #else
14095  {
14096 # if defined( VULKAN_HPP_USE_REFLECT )
14097  return this->reflect() == rhs.reflect();
14098 # else
14099  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) &&
14100  ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
14101  ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
14102  ( rasterizationSamples == rhs.rasterizationSamples );
14103 # endif
14104  }
14105 
14107  {
14108  return !operator==( rhs );
14109  }
14110 #endif
14111 
14112  public:
14114  const void * pNext = {};
14116  uint32_t viewMask = {};
14117  uint32_t colorAttachmentCount = {};
14122  };
14123 
14124  template <>
14125  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
14126  {
14127  using Type = CommandBufferInheritanceRenderingInfo;
14128  };
14130 
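 // Illustrative usage sketch (not part of the generated header): when a secondary command buffer
 // is executed inside dynamic rendering (no VkRenderPass object), the attachment formats are
 // declared by chaining a CommandBufferInheritanceRenderingInfo into the inheritance info's pNext.
 // The formats below are assumptions standing in for the application's actual attachments.
 //
 //   vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
 //   vk::CommandBufferInheritanceRenderingInfo renderingInheritance =
 //     vk::CommandBufferInheritanceRenderingInfo{}
 //       .setColorAttachmentFormats( colorFormat )
 //       .setDepthAttachmentFormat( vk::Format::eD32Sfloat )
 //       .setRasterizationSamples( vk::SampleCountFlagBits::e1 );
 //   vk::CommandBufferInheritanceInfo inheritance{};
 //   inheritance.pNext = &renderingInheritance;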
14131  struct Viewport
14132  {
14134 
14135 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14137  Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
14138  : x( x_ )
14139  , y( y_ )
14140  , width( width_ )
14141  , height( height_ )
14142  , minDepth( minDepth_ )
14143  , maxDepth( maxDepth_ )
14144  {
14145  }
14146 
14147  VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
14148 
14149  Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) ) {}
14150 
14151  Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
14152 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14153 
14155  {
14156  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
14157  return *this;
14158  }
14159 
14160 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14162  {
14163  x = x_;
14164  return *this;
14165  }
14166 
14168  {
14169  y = y_;
14170  return *this;
14171  }
14172 
14174  {
14175  width = width_;
14176  return *this;
14177  }
14178 
14180  {
14181  height = height_;
14182  return *this;
14183  }
14184 
14186  {
14187  minDepth = minDepth_;
14188  return *this;
14189  }
14190 
14192  {
14193  maxDepth = maxDepth_;
14194  return *this;
14195  }
14196 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14197 
14198  operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
14199  {
14200  return *reinterpret_cast<const VkViewport *>( this );
14201  }
14202 
14204  {
14205  return *reinterpret_cast<VkViewport *>( this );
14206  }
14207 
14208 #if defined( VULKAN_HPP_USE_REFLECT )
14209 # if 14 <= VULKAN_HPP_CPP_VERSION
14210  auto
14211 # else
14212  std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
14213 # endif
14214  reflect() const VULKAN_HPP_NOEXCEPT
14215  {
14216  return std::tie( x, y, width, height, minDepth, maxDepth );
14217  }
14218 #endif
14219 
14220 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14221  auto operator<=>( Viewport const & ) const = default;
14222 #else
14223  bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
14224  {
14225 # if defined( VULKAN_HPP_USE_REFLECT )
14226  return this->reflect() == rhs.reflect();
14227 # else
14228  return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) &&
14229  ( maxDepth == rhs.maxDepth );
14230 # endif
14231  }
14232 
14233  bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
14234  {
14235  return !operator==( rhs );
14236  }
14237 #endif
14238 
14239  public:
14240  float x = {};
14241  float y = {};
14242  float width = {};
14243  float height = {};
14244  float minDepth = {};
14245  float maxDepth = {};
14246  };
14247 
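 // Illustrative usage sketch (not part of the generated header): Viewport is most often set as
 // dynamic state while recording. `cmd` is assumed to be a command buffer in the recording state
 // and `extent` the current swapchain extent.
 //
 //   vk::Viewport viewport( 0.0f, 0.0f,
 //                          static_cast<float>( extent.width ), static_cast<float>( extent.height ),
 //                          0.0f, 1.0f );
 //   cmd.setViewport( 0 /*firstViewport*/, viewport );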
14249  {
14251 
14252  static const bool allowDuplicate = false;
14254 
14255 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14257  uint32_t viewportDepthCount_ = {},
14258  const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {},
14259  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
14260  : pNext( pNext_ )
14261  , viewportScissor2D( viewportScissor2D_ )
14262  , viewportDepthCount( viewportDepthCount_ )
14263  , pViewportDepths( pViewportDepths_ )
14264  {
14265  }
14266 
14269 
14271  : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
14272  {
14273  }
14274 
14276 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14277 
14279  {
14280  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
14281  return *this;
14282  }
14283 
14284 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14286  {
14287  pNext = pNext_;
14288  return *this;
14289  }
14290 
14293  {
14294  viewportScissor2D = viewportScissor2D_;
14295  return *this;
14296  }
14297 
14299  {
14300  viewportDepthCount = viewportDepthCount_;
14301  return *this;
14302  }
14303 
14306  {
14307  pViewportDepths = pViewportDepths_;
14308  return *this;
14309  }
14310 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14311 
14313  {
14314  return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
14315  }
14316 
14318  {
14319  return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
14320  }
14321 
14322 #if defined( VULKAN_HPP_USE_REFLECT )
14323 # if 14 <= VULKAN_HPP_CPP_VERSION
14324  auto
14325 # else
14326  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
14327  const void * const &,
14329  uint32_t const &,
14330  const VULKAN_HPP_NAMESPACE::Viewport * const &>
14331 # endif
14332  reflect() const VULKAN_HPP_NOEXCEPT
14333  {
14335  }
14336 #endif
14337 
14338 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14339  auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
14340 #else
14342  {
14343 # if defined( VULKAN_HPP_USE_REFLECT )
14344  return this->reflect() == rhs.reflect();
14345 # else
14346  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) &&
14347  ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths );
14348 # endif
14349  }
14350 
14352  {
14353  return !operator==( rhs );
14354  }
14355 #endif
14356 
14357  public:
14359  const void * pNext = {};
14361  uint32_t viewportDepthCount = {};
14363  };
14364 
14365  template <>
14366  struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
14367  {
14368  using Type = CommandBufferInheritanceViewportScissorInfoNV;
14369  };
14370 
14372  {
14374 
14375  static const bool allowDuplicate = false;
14377 
14378 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14380  uint32_t deviceMask_ = {},
14381  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
14382  : pNext( pNext_ )
14383  , commandBuffer( commandBuffer_ )
14384  , deviceMask( deviceMask_ )
14385  {
14386  }
14387 
14389 
14391  : CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
14392  {
14393  }
14394 
14396 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14397 
14399  {
14400  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
14401  return *this;
14402  }
14403 
14404 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14406  {
14407  pNext = pNext_;
14408  return *this;
14409  }
14410 
14412  {
14413  commandBuffer = commandBuffer_;
14414  return *this;
14415  }
14416 
14418  {
14419  deviceMask = deviceMask_;
14420  return *this;
14421  }
14422 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14423 
14425  {
14426  return *reinterpret_cast<const VkCommandBufferSubmitInfo *>( this );
14427  }
14428 
14430  {
14431  return *reinterpret_cast<VkCommandBufferSubmitInfo *>( this );
14432  }
14433 
14434 #if defined( VULKAN_HPP_USE_REFLECT )
14435 # if 14 <= VULKAN_HPP_CPP_VERSION
14436  auto
14437 # else
14438  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
14439 # endif
14440  reflect() const VULKAN_HPP_NOEXCEPT
14441  {
14442  return std::tie( sType, pNext, commandBuffer, deviceMask );
14443  }
14444 #endif
14445 
14446 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14447  auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
14448 #else
14450  {
14451 # if defined( VULKAN_HPP_USE_REFLECT )
14452  return this->reflect() == rhs.reflect();
14453 # else
14454  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask );
14455 # endif
14456  }
14457 
14459  {
14460  return !operator==( rhs );
14461  }
14462 #endif
14463 
14464  public:
14466  const void * pNext = {};
14468  uint32_t deviceMask = {};
14469  };
14470 
14471  template <>
14472  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
14473  {
14474  using Type = CommandBufferSubmitInfo;
14475  };
14477 
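 // Illustrative usage sketch (not part of the generated header): with synchronization2, each
 // recorded command buffer is wrapped in a CommandBufferSubmitInfo and submitted through
 // Queue::submit2 (the default deviceMask of 0 targets all devices of the device group).
 // The `queue`, `cmd` and `fence` handles are assumed to exist.
 //
 //   vk::CommandBufferSubmitInfo cmdInfo( cmd );
 //   vk::SubmitInfo2 submit = vk::SubmitInfo2{}.setCommandBufferInfos( cmdInfo );
 //   queue.submit2( submit, fence );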
14479  {
14481 
14482  static const bool allowDuplicate = false;
14484 
14485 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14487  uint32_t queueFamilyIndex_ = {},
14488  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
14489  : pNext( pNext_ )
14490  , flags( flags_ )
14491  , queueFamilyIndex( queueFamilyIndex_ )
14492  {
14493  }
14494 
14496 
14498  : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
14499  {
14500  }
14501 
14503 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14504 
14506  {
14507  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
14508  return *this;
14509  }
14510 
14511 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14513  {
14514  pNext = pNext_;
14515  return *this;
14516  }
14517 
14519  {
14520  flags = flags_;
14521  return *this;
14522  }
14523 
14525  {
14526  queueFamilyIndex = queueFamilyIndex_;
14527  return *this;
14528  }
14529 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14530 
14532  {
14533  return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
14534  }
14535 
14537  {
14538  return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
14539  }
14540 
14541 #if defined( VULKAN_HPP_USE_REFLECT )
14542 # if 14 <= VULKAN_HPP_CPP_VERSION
14543  auto
14544 # else
14545  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
14546 # endif
14547  reflect() const VULKAN_HPP_NOEXCEPT
14548  {
14549  return std::tie( sType, pNext, flags, queueFamilyIndex );
14550  }
14551 #endif
14552 
14553 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14554  auto operator<=>( CommandPoolCreateInfo const & ) const = default;
14555 #else
14557  {
14558 # if defined( VULKAN_HPP_USE_REFLECT )
14559  return this->reflect() == rhs.reflect();
14560 # else
14561  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex );
14562 # endif
14563  }
14564 
14566  {
14567  return !operator==( rhs );
14568  }
14569 #endif
14570 
14571  public:
14573  const void * pNext = {};
14575  uint32_t queueFamilyIndex = {};
14576  };
14577 
14578  template <>
14579  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
14580  {
14581  using Type = CommandPoolCreateInfo;
14582  };
14583 
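 // Illustrative usage sketch (not part of the generated header): a command pool is created per
 // queue family; eResetCommandBuffer allows individual command buffers to be reset. The `device`
 // handle and `graphicsQueueFamilyIndex` are assumed to exist.
 //
 //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
 //                                       graphicsQueueFamilyIndex );
 //   vk::CommandPool commandPool = device.createCommandPool( poolInfo );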
14585  {
14587 
14588 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14589  VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
14590  : constantID( constantID_ )
14591  , offset( offset_ )
14592  , size( size_ )
14593  {
14594  }
14595 
14597 
14599  : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
14600  {
14601  }
14602 
14604 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14605 
14607  {
14608  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
14609  return *this;
14610  }
14611 
14612 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14614  {
14615  constantID = constantID_;
14616  return *this;
14617  }
14618 
14620  {
14621  offset = offset_;
14622  return *this;
14623  }
14624 
14626  {
14627  size = size_;
14628  return *this;
14629  }
14630 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14631 
14633  {
14634  return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
14635  }
14636 
14638  {
14639  return *reinterpret_cast<VkSpecializationMapEntry *>( this );
14640  }
14641 
14642 #if defined( VULKAN_HPP_USE_REFLECT )
14643 # if 14 <= VULKAN_HPP_CPP_VERSION
14644  auto
14645 # else
14646  std::tuple<uint32_t const &, uint32_t const &, size_t const &>
14647 # endif
14648  reflect() const VULKAN_HPP_NOEXCEPT
14649  {
14650  return std::tie( constantID, offset, size );
14651  }
14652 #endif
14653 
14654 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14655  auto operator<=>( SpecializationMapEntry const & ) const = default;
14656 #else
14658  {
14659 # if defined( VULKAN_HPP_USE_REFLECT )
14660  return this->reflect() == rhs.reflect();
14661 # else
14662  return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
14663 # endif
14664  }
14665 
14667  {
14668  return !operator==( rhs );
14669  }
14670 #endif
14671 
14672  public:
14673  uint32_t constantID = {};
14674  uint32_t offset = {};
14675  size_t size = {};
14676  };
14677 
14679  {
14681 
14682 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14683  VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
14684  const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {},
14685  size_t dataSize_ = {},
14686  const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
14687  : mapEntryCount( mapEntryCount_ )
14688  , pMapEntries( pMapEntries_ )
14689  , dataSize( dataSize_ )
14690  , pData( pData_ )
14691  {
14692  }
14693 
14695 
14696  SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) ) {}
14697 
14698 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
14699  template <typename T>
14702  : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) )
14703  , pMapEntries( mapEntries_.data() )
14704  , dataSize( data_.size() * sizeof( T ) )
14705  , pData( data_.data() )
14706  {
14707  }
14708 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
14709 
14711 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14712 
14714  {
14715  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
14716  return *this;
14717  }
14718 
14719 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14721  {
14722  mapEntryCount = mapEntryCount_;
14723  return *this;
14724  }
14725 
14727  {
14728  pMapEntries = pMapEntries_;
14729  return *this;
14730  }
14731 
14732 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
14735  {
14736  mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
14737  pMapEntries = mapEntries_.data();
14738  return *this;
14739  }
14740 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
14741 
14743  {
14744  dataSize = dataSize_;
14745  return *this;
14746  }
14747 
14749  {
14750  pData = pData_;
14751  return *this;
14752  }
14753 
14754 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
14755  template <typename T>
14757  {
14758  dataSize = data_.size() * sizeof( T );
14759  pData = data_.data();
14760  return *this;
14761  }
14762 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
14763 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14764 
14766  {
14767  return *reinterpret_cast<const VkSpecializationInfo *>( this );
14768  }
14769 
14771  {
14772  return *reinterpret_cast<VkSpecializationInfo *>( this );
14773  }
14774 
14775 #if defined( VULKAN_HPP_USE_REFLECT )
14776 # if 14 <= VULKAN_HPP_CPP_VERSION
14777  auto
14778 # else
14779  std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
14780 # endif
14781  reflect() const VULKAN_HPP_NOEXCEPT
14782  {
14783  return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
14784  }
14785 #endif
14786 
14787 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14788  auto operator<=>( SpecializationInfo const & ) const = default;
14789 #else
14791  {
14792 # if defined( VULKAN_HPP_USE_REFLECT )
14793  return this->reflect() == rhs.reflect();
14794 # else
14795  return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
14796 # endif
14797  }
14798 
14800  {
14801  return !operator==( rhs );
14802  }
14803 #endif
14804 
14805  public:
14806  uint32_t mapEntryCount = {};
14808  size_t dataSize = {};
14809  const void * pData = {};
14810  };
14811 
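 // Illustrative usage sketch (not part of the generated header): each SpecializationMapEntry maps
 // a SPIR-V constant_id to a byte range of the data blob referenced by SpecializationInfo. The
 // SpecData layout below is an assumption standing in for the shader's actual specialization
 // constants.
 //
 //   struct SpecData { uint32_t workgroupSize; float scale; } specData{ 64, 1.5f };
 //   std::array<vk::SpecializationMapEntry, 2> entries = {
 //     vk::SpecializationMapEntry( 0, static_cast<uint32_t>( offsetof( SpecData, workgroupSize ) ), sizeof( uint32_t ) ),
 //     vk::SpecializationMapEntry( 1, static_cast<uint32_t>( offsetof( SpecData, scale ) ), sizeof( float ) )
 //   };
 //   vk::SpecializationInfo specInfo( static_cast<uint32_t>( entries.size() ), entries.data(), sizeof( specData ), &specData );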
14813  {
14815 
14816  static const bool allowDuplicate = false;
14818 
14819 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14823  const char * pName_ = {},
14824  const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
14825  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
14826  : pNext( pNext_ )
14827  , flags( flags_ )
14828  , stage( stage_ )
14829  , module( module_ )
14830  , pName( pName_ )
14831  , pSpecializationInfo( pSpecializationInfo_ )
14832  {
14833  }
14834 
14836 
14838  : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
14839  {
14840  }
14841 
14843 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
14844 
14846  {
14847  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
14848  return *this;
14849  }
14850 
14851 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
14853  {
14854  pNext = pNext_;
14855  return *this;
14856  }
14857 
14859  {
14860  flags = flags_;
14861  return *this;
14862  }
14863 
14865  {
14866  stage = stage_;
14867  return *this;
14868  }
14869 
14871  {
14872  module = module_;
14873  return *this;
14874  }
14875 
14877  {
14878  pName = pName_;
14879  return *this;
14880  }
14881 
14884  {
14885  pSpecializationInfo = pSpecializationInfo_;
14886  return *this;
14887  }
14888 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
14889 
14891  {
14892  return *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
14893  }
14894 
14896  {
14897  return *reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
14898  }
14899 
14900 #if defined( VULKAN_HPP_USE_REFLECT )
14901 # if 14 <= VULKAN_HPP_CPP_VERSION
14902  auto
14903 # else
14904  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
14905  const void * const &,
14909  const char * const &,
14911 # endif
14912  reflect() const VULKAN_HPP_NOEXCEPT
14913  {
14914  return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
14915  }
14916 #endif
14917 
14918 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
14919  std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
14920  {
14921  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
14922  return cmp;
14923  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
14924  return cmp;
14925  if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
14926  return cmp;
14927  if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
14928  return cmp;
14929  if ( auto cmp = module <=> rhs.module; cmp != 0 )
14930  return cmp;
14931  if ( pName != rhs.pName )
14932  if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
14933  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
14934  if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
14935  return cmp;
14936 
14937  return std::strong_ordering::equivalent;
14938  }
14939 #endif
14940 
14942  {
14943  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) &&
14944  ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
14945  }
14946 
14948  {
14949  return !operator==( rhs );
14950  }
14951 
14952  public:
14954  const void * pNext = {};
14958  const char * pName = {};
14960  };
14961 
14962  template <>
14964  {
14966  };
14967 
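 // Usage sketch (not part of the generated header): describing a compute shader stage.
 // Assumes `namespace vk = VULKAN_HPP_NAMESPACE;` and a previously created vk::ShaderModule;
 // the entry point name "main" is the usual convention, not a requirement.
 inline vk::PipelineShaderStageCreateInfo makeExampleComputeStage( vk::ShaderModule               shaderModule,
                                                                   vk::SpecializationInfo const * specInfo = nullptr )
 {
   return vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(),  // flags
                                             vk::ShaderStageFlagBits::eCompute,     // stage
                                             shaderModule,                          // module
                                             "main",                                // pName
                                             specInfo );                            // pSpecializationInfo (optional)
 }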
14969  {
14971 
14972  static const bool allowDuplicate = false;
14974 
14975 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
14979  VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
14980  int32_t basePipelineIndex_ = {},
14981  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
14982  : pNext( pNext_ )
14983  , flags( flags_ )
14984  , stage( stage_ )
14985  , layout( layout_ )
14986  , basePipelineHandle( basePipelineHandle_ )
14987  , basePipelineIndex( basePipelineIndex_ )
14988  {
14989  }
14990 
14992 
14994  : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
14995  {
14996  }
14997 
14999 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15000 
15002  {
15003  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
15004  return *this;
15005  }
15006 
15007 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15009  {
15010  pNext = pNext_;
15011  return *this;
15012  }
15013 
15015  {
15016  flags = flags_;
15017  return *this;
15018  }
15019 
15021  {
15022  stage = stage_;
15023  return *this;
15024  }
15025 
15027  {
15028  layout = layout_;
15029  return *this;
15030  }
15031 
15033  {
15034  basePipelineHandle = basePipelineHandle_;
15035  return *this;
15036  }
15037 
15039  {
15040  basePipelineIndex = basePipelineIndex_;
15041  return *this;
15042  }
15043 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15044 
15046  {
15047  return *reinterpret_cast<const VkComputePipelineCreateInfo *>( this );
15048  }
15049 
15051  {
15052  return *reinterpret_cast<VkComputePipelineCreateInfo *>( this );
15053  }
15054 
15055 #if defined( VULKAN_HPP_USE_REFLECT )
15056 # if 14 <= VULKAN_HPP_CPP_VERSION
15057  auto
15058 # else
15059  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15060  const void * const &,
15065  int32_t const &>
15066 # endif
15067  reflect() const VULKAN_HPP_NOEXCEPT
15068  {
 15069  return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
 15070  }
15071 #endif
15072 
15073 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15074  auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
15075 #else
15077  {
15078 # if defined( VULKAN_HPP_USE_REFLECT )
15079  return this->reflect() == rhs.reflect();
15080 # else
15081  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) &&
15082  ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
15083 # endif
15084  }
15085 
15087  {
15088  return !operator==( rhs );
15089  }
15090 #endif
15091 
15092  public:
15094  const void * pNext = {};
15099  int32_t basePipelineIndex = {};
15100  };
15101 
15102  template <>
15104  {
15106  };
15107 
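 // Usage sketch (not part of the generated header): creating a compute pipeline from a
 // single shader stage. Assumes `namespace vk = VULKAN_HPP_NAMESPACE;`, valid vk::Device
 // and vk::PipelineLayout handles, and the default (exception-enabled) configuration.
 inline vk::Pipeline createExampleComputePipeline( vk::Device device, vk::PipelineLayout layout, vk::PipelineShaderStageCreateInfo const & stage )
 {
   vk::ComputePipelineCreateInfo createInfo( vk::PipelineCreateFlags(),  // flags
                                             stage,                      // stage
                                             layout );                   // layout; basePipelineHandle/-Index stay at their defaults
   // createComputePipeline returns a ResultValue because pipeline creation may report the
   // success code ePipelineCompileRequired; value holds the pipeline on success.
   return device.createComputePipeline( nullptr, createInfo ).value;
 }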
15109  {
15111 
15112  static const bool allowDuplicate = false;
15114 
15115 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15117  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
15119  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15120  : pNext( pNext_ )
15121  , buffer( buffer_ )
15122  , offset( offset_ )
15123  , flags( flags_ )
15124  {
15125  }
15126 
15128 
15130  : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
15131  {
15132  }
15133 
15135 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15136 
15138  {
15139  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
15140  return *this;
15141  }
15142 
15143 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15145  {
15146  pNext = pNext_;
15147  return *this;
15148  }
15149 
15151  {
15152  buffer = buffer_;
15153  return *this;
15154  }
15155 
15157  {
15158  offset = offset_;
15159  return *this;
15160  }
15161 
15163  {
15164  flags = flags_;
15165  return *this;
15166  }
15167 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15168 
15170  {
15171  return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( this );
15172  }
15173 
15175  {
15176  return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>( this );
15177  }
15178 
15179 #if defined( VULKAN_HPP_USE_REFLECT )
15180 # if 14 <= VULKAN_HPP_CPP_VERSION
15181  auto
15182 # else
15183  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15184  const void * const &,
15188 # endif
15189  reflect() const VULKAN_HPP_NOEXCEPT
15190  {
15191  return std::tie( sType, pNext, buffer, offset, flags );
15192  }
15193 #endif
15194 
15195 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15196  auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
15197 #else
15199  {
15200 # if defined( VULKAN_HPP_USE_REFLECT )
15201  return this->reflect() == rhs.reflect();
15202 # else
15203  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags );
15204 # endif
15205  }
15206 
15208  {
15209  return !operator==( rhs );
15210  }
15211 #endif
15212 
15213  public:
15215  const void * pNext = {};
15219  };
15220 
15221  template <>
15223  {
15225  };
15226 
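 // Usage sketch (not part of the generated header): wrapping draws in a conditional
 // rendering block driven by a 32-bit predicate stored in a buffer. Assumes
 // `namespace vk = VULKAN_HPP_NAMESPACE;`, VK_EXT_conditional_rendering enabled on the
 // device, and a dispatcher with the extension entry points loaded.
 inline void recordExampleConditionalDraws( vk::CommandBuffer cmd, vk::Buffer predicateBuffer, vk::DeviceSize predicateOffset )
 {
   vk::ConditionalRenderingBeginInfoEXT beginInfo( predicateBuffer, predicateOffset );  // pass eInverted in flags to flip the test
   cmd.beginConditionalRenderingEXT( beginInfo );
   // ... record the draws that should be skipped when the predicate value is zero ...
   cmd.endConditionalRenderingEXT();
 }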
15228  {
15230 
15231 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15232  VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
15233  : major( major_ )
15234  , minor( minor_ )
15235  , subminor( subminor_ )
15236  , patch( patch_ )
15237  {
15238  }
15239 
15241 
15242  ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) ) {}
15243 
15245 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15246 
15248  {
15249  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
15250  return *this;
15251  }
15252 
15253 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15255  {
15256  major = major_;
15257  return *this;
15258  }
15259 
15261  {
15262  minor = minor_;
15263  return *this;
15264  }
15265 
15267  {
15268  subminor = subminor_;
15269  return *this;
15270  }
15271 
15273  {
15274  patch = patch_;
15275  return *this;
15276  }
15277 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15278 
15280  {
15281  return *reinterpret_cast<const VkConformanceVersion *>( this );
15282  }
15283 
15285  {
15286  return *reinterpret_cast<VkConformanceVersion *>( this );
15287  }
15288 
15289 #if defined( VULKAN_HPP_USE_REFLECT )
15290 # if 14 <= VULKAN_HPP_CPP_VERSION
15291  auto
15292 # else
15293  std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
15294 # endif
15295  reflect() const VULKAN_HPP_NOEXCEPT
15296  {
15297  return std::tie( major, minor, subminor, patch );
15298  }
15299 #endif
15300 
15301 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15302  auto operator<=>( ConformanceVersion const & ) const = default;
15303 #else
15305  {
15306 # if defined( VULKAN_HPP_USE_REFLECT )
15307  return this->reflect() == rhs.reflect();
15308 # else
15309  return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch );
15310 # endif
15311  }
15312 
15314  {
15315  return !operator==( rhs );
15316  }
15317 #endif
15318 
15319  public:
15320  uint8_t major = {};
15321  uint8_t minor = {};
15322  uint8_t subminor = {};
15323  uint8_t patch = {};
15324  };
15325  using ConformanceVersionKHR = ConformanceVersion;
15326 
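 // Usage sketch (not part of the generated header): reading the driver's conformance test
 // version through the driver properties (Vulkan 1.2 core or VK_KHR_driver_properties).
 // Assumes `namespace vk = VULKAN_HPP_NAMESPACE;` and getPhysicalDeviceProperties2 support.
 inline vk::ConformanceVersion queryExampleConformanceVersion( vk::PhysicalDevice physicalDevice )
 {
   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDriverProperties>();
   return chain.get<vk::PhysicalDeviceDriverProperties>().conformanceVersion;
 }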
15328  {
15330 
15331  static const bool allowDuplicate = false;
15333 
15334 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15336  uint32_t NSize_ = {},
15337  uint32_t KSize_ = {},
15343  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15344  : pNext( pNext_ )
15345  , MSize( MSize_ )
15346  , NSize( NSize_ )
15347  , KSize( KSize_ )
15348  , AType( AType_ )
15349  , BType( BType_ )
15350  , CType( CType_ )
15351  , DType( DType_ )
15352  , scope( scope_ )
15353  {
15354  }
15355 
15357 
15359  : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
15360  {
15361  }
15362 
15364 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15365 
15367  {
15368  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
15369  return *this;
15370  }
15371 
15372 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15374  {
15375  pNext = pNext_;
15376  return *this;
15377  }
15378 
15380  {
15381  MSize = MSize_;
15382  return *this;
15383  }
15384 
15386  {
15387  NSize = NSize_;
15388  return *this;
15389  }
15390 
15392  {
15393  KSize = KSize_;
15394  return *this;
15395  }
15396 
15398  {
15399  AType = AType_;
15400  return *this;
15401  }
15402 
15404  {
15405  BType = BType_;
15406  return *this;
15407  }
15408 
15410  {
15411  CType = CType_;
15412  return *this;
15413  }
15414 
15416  {
15417  DType = DType_;
15418  return *this;
15419  }
15420 
15422  {
15423  scope = scope_;
15424  return *this;
15425  }
15426 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15427 
15429  {
15430  return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
15431  }
15432 
15434  {
15435  return *reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this );
15436  }
15437 
15438 #if defined( VULKAN_HPP_USE_REFLECT )
15439 # if 14 <= VULKAN_HPP_CPP_VERSION
15440  auto
15441 # else
15442  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15443  void * const &,
15444  uint32_t const &,
15445  uint32_t const &,
15446  uint32_t const &,
15452 # endif
15453  reflect() const VULKAN_HPP_NOEXCEPT
15454  {
15455  return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
15456  }
15457 #endif
15458 
15459 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15460  auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
15461 #else
15463  {
15464 # if defined( VULKAN_HPP_USE_REFLECT )
15465  return this->reflect() == rhs.reflect();
15466 # else
15467  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) &&
15468  ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope );
15469 # endif
15470  }
15471 
15473  {
15474  return !operator==( rhs );
15475  }
15476 #endif
15477 
15478  public:
15480  void * pNext = {};
15481  uint32_t MSize = {};
15482  uint32_t NSize = {};
15483  uint32_t KSize = {};
15489  };
15490 
15491  template <>
15493  {
15495  };
15496 
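 // Usage sketch (not part of the generated header): checking whether the device advertises
 // a particular MxNxK cooperative matrix multiply shape. Assumes `namespace vk =
 // VULKAN_HPP_NAMESPACE;`, VK_NV_cooperative_matrix, and loaded extension entry points.
 inline bool supportsExampleCooperativeShape( vk::PhysicalDevice physicalDevice, uint32_t m, uint32_t n, uint32_t k )
 {
   std::vector<vk::CooperativeMatrixPropertiesNV> properties = physicalDevice.getCooperativeMatrixPropertiesNV();
   for ( vk::CooperativeMatrixPropertiesNV const & p : properties )
   {
     if ( ( p.MSize == m ) && ( p.NSize == n ) && ( p.KSize == k ) )
     {
       return true;  // element types and scope still need to be matched against the shader
     }
   }
   return false;
 }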
15498  {
15500 
15501  static const bool allowDuplicate = false;
15503 
15504 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15509  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15510  : pNext( pNext_ )
15511  , src( src_ )
15512  , dst( dst_ )
15513  , mode( mode_ )
15514  {
15515  }
15516 
15518 
15520  : CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
15521  {
15522  }
15523 
15525 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15526 
15528  {
15529  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
15530  return *this;
15531  }
15532 
15533 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15535  {
15536  pNext = pNext_;
15537  return *this;
15538  }
15539 
15541  {
15542  src = src_;
15543  return *this;
15544  }
15545 
15547  {
15548  dst = dst_;
15549  return *this;
15550  }
15551 
15553  {
15554  mode = mode_;
15555  return *this;
15556  }
15557 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15558 
15560  {
15561  return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( this );
15562  }
15563 
15565  {
15566  return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR *>( this );
15567  }
15568 
15569 #if defined( VULKAN_HPP_USE_REFLECT )
15570 # if 14 <= VULKAN_HPP_CPP_VERSION
15571  auto
15572 # else
15573  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15574  const void * const &,
15578 # endif
15579  reflect() const VULKAN_HPP_NOEXCEPT
15580  {
15581  return std::tie( sType, pNext, src, dst, mode );
15582  }
15583 #endif
15584 
15585 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15586  auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
15587 #else
15589  {
15590 # if defined( VULKAN_HPP_USE_REFLECT )
15591  return this->reflect() == rhs.reflect();
15592 # else
15593  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
15594 # endif
15595  }
15596 
15598  {
15599  return !operator==( rhs );
15600  }
15601 #endif
15602 
15603  public:
15605  const void * pNext = {};
15609  };
15610 
15611  template <>
15613  {
15615  };
15616 
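 // Usage sketch (not part of the generated header): recording a compacting copy of an
 // acceleration structure after its build. Assumes `namespace vk = VULKAN_HPP_NAMESPACE;`
 // and VK_KHR_acceleration_structure with its entry points loaded.
 inline void recordExampleCompactingCopy( vk::CommandBuffer cmd, vk::AccelerationStructureKHR src, vk::AccelerationStructureKHR dst )
 {
   vk::CopyAccelerationStructureInfoKHR info( src, dst, vk::CopyAccelerationStructureModeKHR::eCompact );
   cmd.copyAccelerationStructureKHR( info );  // dst must have been created with the compacted size query result
 }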
15618  {
15620 
15621  static const bool allowDuplicate = false;
15623 
15624 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15629  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15630  : pNext( pNext_ )
15631  , src( src_ )
15632  , dst( dst_ )
15633  , mode( mode_ )
15634  {
15635  }
15636 
15638 
15640  : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
15641  {
15642  }
15643 
15645 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15646 
15648  {
15649  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
15650  return *this;
15651  }
15652 
15653 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15655  {
15656  pNext = pNext_;
15657  return *this;
15658  }
15659 
15661  {
15662  src = src_;
15663  return *this;
15664  }
15665 
15667  {
15668  dst = dst_;
15669  return *this;
15670  }
15671 
15674  {
15675  mode = mode_;
15676  return *this;
15677  }
15678 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15679 
15681  {
15682  return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
15683  }
15684 
15686  {
15687  return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
15688  }
15689 
15690 #if defined( VULKAN_HPP_USE_REFLECT )
15691 # if 14 <= VULKAN_HPP_CPP_VERSION
15692  auto
15693 # else
15694  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15695  const void * const &,
15699 # endif
15700  reflect() const VULKAN_HPP_NOEXCEPT
15701  {
15702  return std::tie( sType, pNext, src, dst, mode );
15703  }
15704 #endif
15705 
15706  public:
15708  const void * pNext = {};
15712  };
15713 
15714  template <>
15716  {
15718  };
15719 
15721  {
15723 
15724  static const bool allowDuplicate = false;
15726 
15727 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15729  VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
15730  uint32_t regionCount_ = {},
15731  const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {},
15732  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15733  : pNext( pNext_ )
15734  , srcBuffer( srcBuffer_ )
15735  , dstBuffer( dstBuffer_ )
15736  , regionCount( regionCount_ )
15737  , pRegions( pRegions_ )
15738  {
15739  }
15740 
15742 
15743  CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) ) {}
15744 
15745 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
15747  VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
15749  const void * pNext_ = nullptr )
15750  : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
15751  {
15752  }
15753 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
15754 
15755  CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
15756 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15757 
15759  {
15760  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const *>( &rhs );
15761  return *this;
15762  }
15763 
15764 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15766  {
15767  pNext = pNext_;
15768  return *this;
15769  }
15770 
15772  {
15773  srcBuffer = srcBuffer_;
15774  return *this;
15775  }
15776 
15778  {
15779  dstBuffer = dstBuffer_;
15780  return *this;
15781  }
15782 
15784  {
15785  regionCount = regionCount_;
15786  return *this;
15787  }
15788 
15790  {
15791  pRegions = pRegions_;
15792  return *this;
15793  }
15794 
15795 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
15797  {
15798  regionCount = static_cast<uint32_t>( regions_.size() );
15799  pRegions = regions_.data();
15800  return *this;
15801  }
15802 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
15803 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15804 
15805  operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
15806  {
15807  return *reinterpret_cast<const VkCopyBufferInfo2 *>( this );
15808  }
15809 
15811  {
15812  return *reinterpret_cast<VkCopyBufferInfo2 *>( this );
15813  }
15814 
15815 #if defined( VULKAN_HPP_USE_REFLECT )
15816 # if 14 <= VULKAN_HPP_CPP_VERSION
15817  auto
15818 # else
15819  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15820  const void * const &,
15823  uint32_t const &,
15824  const VULKAN_HPP_NAMESPACE::BufferCopy2 * const &>
15825 # endif
15826  reflect() const VULKAN_HPP_NOEXCEPT
15827  {
15828  return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
15829  }
15830 #endif
15831 
15832 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15833  auto operator<=>( CopyBufferInfo2 const & ) const = default;
15834 #else
15836  {
15837 # if defined( VULKAN_HPP_USE_REFLECT )
15838  return this->reflect() == rhs.reflect();
15839 # else
15840  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) &&
15841  ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
15842 # endif
15843  }
15844 
15846  {
15847  return !operator==( rhs );
15848  }
15849 #endif
15850 
15851  public:
15853  const void * pNext = {};
15856  uint32_t regionCount = {};
15858  };
15859 
15860  template <>
15862  {
15864  };
 15865  using CopyBufferInfo2KHR = CopyBufferInfo2;
 15866 
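 // Usage sketch (not part of the generated header): recording a buffer-to-buffer copy with
 // the "2" entry point (Vulkan 1.3 core; use copyBuffer2KHR with VK_KHR_copy_commands2).
 // Assumes `namespace vk = VULKAN_HPP_NAMESPACE;`.
 inline void recordExampleBufferCopy( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
 {
   vk::BufferCopy2     region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
   vk::CopyBufferInfo2 copyInfo( src, dst, region );  // enhanced constructor deduces regionCount/pRegions from the proxy
   cmd.copyBuffer2( copyInfo );
 }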
15868  {
15870 
15871  static const bool allowDuplicate = false;
15873 
15874 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
15876  VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
15878  uint32_t regionCount_ = {},
15879  const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
15880  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
15881  : pNext( pNext_ )
15882  , srcBuffer( srcBuffer_ )
15883  , dstImage( dstImage_ )
15884  , dstImageLayout( dstImageLayout_ )
15885  , regionCount( regionCount_ )
15886  , pRegions( pRegions_ )
15887  {
15888  }
15889 
15891 
15893  : CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
15894  {
15895  }
15896 
15897 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
15899  VULKAN_HPP_NAMESPACE::Image dstImage_,
15900  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
15902  const void * pNext_ = nullptr )
15903  : pNext( pNext_ )
15904  , srcBuffer( srcBuffer_ )
15905  , dstImage( dstImage_ )
15906  , dstImageLayout( dstImageLayout_ )
15907  , regionCount( static_cast<uint32_t>( regions_.size() ) )
15908  , pRegions( regions_.data() )
15909  {
15910  }
15911 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
15912 
15914 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
15915 
15917  {
15918  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const *>( &rhs );
15919  return *this;
15920  }
15921 
15922 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
15924  {
15925  pNext = pNext_;
15926  return *this;
15927  }
15928 
15930  {
15931  srcBuffer = srcBuffer_;
15932  return *this;
15933  }
15934 
15936  {
15937  dstImage = dstImage_;
15938  return *this;
15939  }
15940 
15942  {
15943  dstImageLayout = dstImageLayout_;
15944  return *this;
15945  }
15946 
15948  {
15949  regionCount = regionCount_;
15950  return *this;
15951  }
15952 
15954  {
15955  pRegions = pRegions_;
15956  return *this;
15957  }
15958 
15959 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
15962  {
15963  regionCount = static_cast<uint32_t>( regions_.size() );
15964  pRegions = regions_.data();
15965  return *this;
15966  }
15967 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
15968 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
15969 
15971  {
15972  return *reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
15973  }
15974 
15976  {
15977  return *reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
15978  }
15979 
15980 #if defined( VULKAN_HPP_USE_REFLECT )
15981 # if 14 <= VULKAN_HPP_CPP_VERSION
15982  auto
15983 # else
15984  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
15985  const void * const &,
15989  uint32_t const &,
15991 # endif
15992  reflect() const VULKAN_HPP_NOEXCEPT
15993  {
15994  return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
15995  }
15996 #endif
15997 
15998 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
15999  auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
16000 #else
16002  {
16003 # if defined( VULKAN_HPP_USE_REFLECT )
16004  return this->reflect() == rhs.reflect();
16005 # else
16006  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) &&
16007  ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
16008 # endif
16009  }
16010 
16012  {
16013  return !operator==( rhs );
16014  }
16015 #endif
16016 
16017  public:
16019  const void * pNext = {};
16023  uint32_t regionCount = {};
16025  };
16026 
16027  template <>
16029  {
16031  };
 16032  using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
 16033 
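 // Usage sketch (not part of the generated header): uploading mip level 0 of a 2D color
 // image from a tightly packed staging buffer. Assumes `namespace vk =
 // VULKAN_HPP_NAMESPACE;` and that the image is already in eTransferDstOptimal layout.
 inline void recordExampleBufferToImageCopy( vk::CommandBuffer cmd, vk::Buffer staging, vk::Image image, uint32_t width, uint32_t height )
 {
   vk::BufferImageCopy2 region( 0,  // bufferOffset
                                0,  // bufferRowLength  (0 = tightly packed)
                                0,  // bufferImageHeight (0 = tightly packed)
                                vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                                vk::Offset3D( 0, 0, 0 ),
                                vk::Extent3D( width, height, 1 ) );
   vk::CopyBufferToImageInfo2 copyInfo( staging, image, vk::ImageLayout::eTransferDstOptimal, region );
   cmd.copyBufferToImage2( copyInfo );
 }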
16035  {
16037 
16038  static const bool allowDuplicate = false;
16040 
16041 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16044  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16045  : pNext( pNext_ )
16046  , transform( transform_ )
16047  {
16048  }
16049 
16051 
16053  : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
16054  {
16055  }
16056 
16058 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16059 
16061  {
16062  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
16063  return *this;
16064  }
16065 
16066 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16068  {
16069  pNext = pNext_;
16070  return *this;
16071  }
16072 
16074  {
16075  transform = transform_;
16076  return *this;
16077  }
16078 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16079 
16081  {
16082  return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM *>( this );
16083  }
16084 
16086  {
16087  return *reinterpret_cast<VkCopyCommandTransformInfoQCOM *>( this );
16088  }
16089 
16090 #if defined( VULKAN_HPP_USE_REFLECT )
16091 # if 14 <= VULKAN_HPP_CPP_VERSION
16092  auto
16093 # else
16094  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
16095 # endif
16096  reflect() const VULKAN_HPP_NOEXCEPT
16097  {
16098  return std::tie( sType, pNext, transform );
16099  }
16100 #endif
16101 
16102 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16103  auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
16104 #else
16106  {
16107 # if defined( VULKAN_HPP_USE_REFLECT )
16108  return this->reflect() == rhs.reflect();
16109 # else
16110  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
16111 # endif
16112  }
16113 
16115  {
16116  return !operator==( rhs );
16117  }
16118 #endif
16119 
16120  public:
16122  const void * pNext = {};
16124  };
16125 
16126  template <>
16128  {
16130  };
16131 
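 // Usage sketch (not part of the generated header): attaching a 90-degree rotation to a
 // single copy region through its pNext chain (VK_QCOM_rotated_copy_commands). Assumes
 // `namespace vk = VULKAN_HPP_NAMESPACE;`; the transform struct must outlive the copy call.
 inline void attachExampleRotation( vk::BufferImageCopy2 & region, vk::CopyCommandTransformInfoQCOM & transformInfo )
 {
   transformInfo.transform = vk::SurfaceTransformFlagBitsKHR::eRotate90;
   transformInfo.pNext     = region.pNext;   // keep anything that is already chained
   region.pNext            = &transformInfo;
 }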
16133  {
16135 
16136  static const bool allowDuplicate = false;
16138 
16139 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16141  uint32_t srcBinding_ = {},
16142  uint32_t srcArrayElement_ = {},
16144  uint32_t dstBinding_ = {},
16145  uint32_t dstArrayElement_ = {},
16146  uint32_t descriptorCount_ = {},
16147  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16148  : pNext( pNext_ )
16149  , srcSet( srcSet_ )
16150  , srcBinding( srcBinding_ )
16151  , srcArrayElement( srcArrayElement_ )
16152  , dstSet( dstSet_ )
16153  , dstBinding( dstBinding_ )
16154  , dstArrayElement( dstArrayElement_ )
16155  , descriptorCount( descriptorCount_ )
16156  {
16157  }
16158 
16160 
16161  CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) ) {}
16162 
16164 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16165 
16167  {
16168  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
16169  return *this;
16170  }
16171 
16172 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16174  {
16175  pNext = pNext_;
16176  return *this;
16177  }
16178 
16180  {
16181  srcSet = srcSet_;
16182  return *this;
16183  }
16184 
16186  {
16187  srcBinding = srcBinding_;
16188  return *this;
16189  }
16190 
16192  {
16193  srcArrayElement = srcArrayElement_;
16194  return *this;
16195  }
16196 
16198  {
16199  dstSet = dstSet_;
16200  return *this;
16201  }
16202 
16204  {
16205  dstBinding = dstBinding_;
16206  return *this;
16207  }
16208 
16210  {
16211  dstArrayElement = dstArrayElement_;
16212  return *this;
16213  }
16214 
16216  {
16217  descriptorCount = descriptorCount_;
16218  return *this;
16219  }
16220 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16221 
16222  operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
16223  {
16224  return *reinterpret_cast<const VkCopyDescriptorSet *>( this );
16225  }
16226 
16228  {
16229  return *reinterpret_cast<VkCopyDescriptorSet *>( this );
16230  }
16231 
16232 #if defined( VULKAN_HPP_USE_REFLECT )
16233 # if 14 <= VULKAN_HPP_CPP_VERSION
16234  auto
16235 # else
16236  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
16237  const void * const &,
16239  uint32_t const &,
16240  uint32_t const &,
16242  uint32_t const &,
16243  uint32_t const &,
16244  uint32_t const &>
16245 # endif
16246  reflect() const VULKAN_HPP_NOEXCEPT
16247  {
 16248  return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
 16249  }
16250 #endif
16251 
16252 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16253  auto operator<=>( CopyDescriptorSet const & ) const = default;
16254 #else
16256  {
16257 # if defined( VULKAN_HPP_USE_REFLECT )
16258  return this->reflect() == rhs.reflect();
16259 # else
16260  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) &&
16261  ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
16262  ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
16263 # endif
16264  }
16265 
16267  {
16268  return !operator==( rhs );
16269  }
16270 #endif
16271 
16272  public:
16274  const void * pNext = {};
16276  uint32_t srcBinding = {};
16277  uint32_t srcArrayElement = {};
16279  uint32_t dstBinding = {};
16280  uint32_t dstArrayElement = {};
16281  uint32_t descriptorCount = {};
16282  };
16283 
16284  template <>
16286  {
16288  };
16289 
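 // Usage sketch (not part of the generated header): copying one already-written binding
 // from a source descriptor set into a freshly allocated one instead of re-writing it.
 // Assumes `namespace vk = VULKAN_HPP_NAMESPACE;` and layout-compatible bindings.
 inline void copyExampleBinding( vk::Device device, vk::DescriptorSet srcSet, vk::DescriptorSet dstSet, uint32_t binding )
 {
   vk::CopyDescriptorSet copyInfo( srcSet, binding, 0 /*srcArrayElement*/, dstSet, binding, 0 /*dstArrayElement*/, 1 /*descriptorCount*/ );
   device.updateDescriptorSets( nullptr, copyInfo );  // no writes, one copy
 }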
16290  struct ImageCopy2
16291  {
16293 
16294  static const bool allowDuplicate = false;
16296 
16297 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16299  VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
16300  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
16301  VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
16302  VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
16303  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16304  : pNext( pNext_ )
16305  , srcSubresource( srcSubresource_ )
16306  , srcOffset( srcOffset_ )
16307  , dstSubresource( dstSubresource_ )
16308  , dstOffset( dstOffset_ )
16309  , extent( extent_ )
16310  {
16311  }
16312 
16314 
16315  ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) ) {}
16316 
16317  ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
16318 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16319 
16321  {
16322  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
16323  return *this;
16324  }
16325 
16326 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16328  {
16329  pNext = pNext_;
16330  return *this;
16331  }
16332 
16334  {
16335  srcSubresource = srcSubresource_;
16336  return *this;
16337  }
16338 
16340  {
16341  srcOffset = srcOffset_;
16342  return *this;
16343  }
16344 
16346  {
16347  dstSubresource = dstSubresource_;
16348  return *this;
16349  }
16350 
16352  {
16353  dstOffset = dstOffset_;
16354  return *this;
16355  }
16356 
16358  {
16359  extent = extent_;
16360  return *this;
16361  }
16362 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16363 
16364  operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
16365  {
16366  return *reinterpret_cast<const VkImageCopy2 *>( this );
16367  }
16368 
16370  {
16371  return *reinterpret_cast<VkImageCopy2 *>( this );
16372  }
16373 
16374 #if defined( VULKAN_HPP_USE_REFLECT )
16375 # if 14 <= VULKAN_HPP_CPP_VERSION
16376  auto
16377 # else
16378  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
16379  const void * const &,
16385 # endif
16386  reflect() const VULKAN_HPP_NOEXCEPT
16387  {
 16388  return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
 16389  }
16390 #endif
16391 
16392 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16393  auto operator<=>( ImageCopy2 const & ) const = default;
16394 #else
16395  bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
16396  {
16397 # if defined( VULKAN_HPP_USE_REFLECT )
16398  return this->reflect() == rhs.reflect();
16399 # else
16400  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
16401  ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
16402 # endif
16403  }
16404 
16405  bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
16406  {
16407  return !operator==( rhs );
16408  }
16409 #endif
16410 
16411  public:
16413  const void * pNext = {};
16419  };
16420 
16421  template <>
16423  {
16424  using Type = ImageCopy2;
16425  };
16426  using ImageCopy2KHR = ImageCopy2;
16427 
16429  {
16431 
16432  static const bool allowDuplicate = false;
16434 
16435 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16438  VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
16440  uint32_t regionCount_ = {},
16441  const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {},
16442  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16443  : pNext( pNext_ )
16444  , srcImage( srcImage_ )
16445  , srcImageLayout( srcImageLayout_ )
16446  , dstImage( dstImage_ )
16447  , dstImageLayout( dstImageLayout_ )
16448  , regionCount( regionCount_ )
16449  , pRegions( pRegions_ )
16450  {
16451  }
16452 
16454 
16455  CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) ) {}
16456 
16457 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
16459  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
16460  VULKAN_HPP_NAMESPACE::Image dstImage_,
16461  VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
16463  const void * pNext_ = nullptr )
16464  : pNext( pNext_ )
16465  , srcImage( srcImage_ )
16466  , srcImageLayout( srcImageLayout_ )
16467  , dstImage( dstImage_ )
16468  , dstImageLayout( dstImageLayout_ )
16469  , regionCount( static_cast<uint32_t>( regions_.size() ) )
16470  , pRegions( regions_.data() )
16471  {
16472  }
16473 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16474 
16475  CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
16476 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16477 
16479  {
16480  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2 const *>( &rhs );
16481  return *this;
16482  }
16483 
16484 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16486  {
16487  pNext = pNext_;
16488  return *this;
16489  }
16490 
16492  {
16493  srcImage = srcImage_;
16494  return *this;
16495  }
16496 
16498  {
16499  srcImageLayout = srcImageLayout_;
16500  return *this;
16501  }
16502 
16504  {
16505  dstImage = dstImage_;
16506  return *this;
16507  }
16508 
16510  {
16511  dstImageLayout = dstImageLayout_;
16512  return *this;
16513  }
16514 
16516  {
16517  regionCount = regionCount_;
16518  return *this;
16519  }
16520 
16522  {
16523  pRegions = pRegions_;
16524  return *this;
16525  }
16526 
16527 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
16529  {
16530  regionCount = static_cast<uint32_t>( regions_.size() );
16531  pRegions = regions_.data();
16532  return *this;
16533  }
16534 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16535 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16536 
16537  operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
16538  {
16539  return *reinterpret_cast<const VkCopyImageInfo2 *>( this );
16540  }
16541 
16543  {
16544  return *reinterpret_cast<VkCopyImageInfo2 *>( this );
16545  }
16546 
16547 #if defined( VULKAN_HPP_USE_REFLECT )
16548 # if 14 <= VULKAN_HPP_CPP_VERSION
16549  auto
16550 # else
16551  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
16552  const void * const &,
16557  uint32_t const &,
16558  const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
16559 # endif
16560  reflect() const VULKAN_HPP_NOEXCEPT
16561  {
 16562  return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
 16563  }
16564 #endif
16565 
16566 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16567  auto operator<=>( CopyImageInfo2 const & ) const = default;
16568 #else
16570  {
16571 # if defined( VULKAN_HPP_USE_REFLECT )
16572  return this->reflect() == rhs.reflect();
16573 # else
16574  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
16575  ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
16576 # endif
16577  }
16578 
16580  {
16581  return !operator==( rhs );
16582  }
16583 #endif
16584 
16585  public:
16587  const void * pNext = {};
16592  uint32_t regionCount = {};
16594  };
16595 
16596  template <>
16598  {
16600  };
 16601  using CopyImageInfo2KHR = CopyImageInfo2;
 16602 
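 // Usage sketch (not part of the generated header): copying one full color mip level
 // between two 2D images with matching formats and extents, both already transitioned to
 // the transfer layouts used below. Assumes `namespace vk = VULKAN_HPP_NAMESPACE;`.
 inline void recordExampleImageCopy( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, vk::Extent3D extent )
 {
   vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
   vk::ImageCopy2             region( layers, vk::Offset3D( 0, 0, 0 ), layers, vk::Offset3D( 0, 0, 0 ), extent );
   vk::CopyImageInfo2         copyInfo( src, vk::ImageLayout::eTransferSrcOptimal, dst, vk::ImageLayout::eTransferDstOptimal, region );
   cmd.copyImage2( copyInfo );
 }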
16604  {
16606 
16607  static const bool allowDuplicate = false;
16609 
16610 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16613  VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
16614  uint32_t regionCount_ = {},
16615  const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
16616  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16617  : pNext( pNext_ )
16618  , srcImage( srcImage_ )
16619  , srcImageLayout( srcImageLayout_ )
16620  , dstBuffer( dstBuffer_ )
16621  , regionCount( regionCount_ )
16622  , pRegions( pRegions_ )
16623  {
16624  }
16625 
16627 
16629  : CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
16630  {
16631  }
16632 
16633 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
16635  VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
16636  VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
16638  const void * pNext_ = nullptr )
16639  : pNext( pNext_ )
16640  , srcImage( srcImage_ )
16641  , srcImageLayout( srcImageLayout_ )
16642  , dstBuffer( dstBuffer_ )
16643  , regionCount( static_cast<uint32_t>( regions_.size() ) )
16644  , pRegions( regions_.data() )
16645  {
16646  }
16647 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16648 
16650 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16651 
16653  {
16654  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const *>( &rhs );
16655  return *this;
16656  }
16657 
16658 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16660  {
16661  pNext = pNext_;
16662  return *this;
16663  }
16664 
16666  {
16667  srcImage = srcImage_;
16668  return *this;
16669  }
16670 
16672  {
16673  srcImageLayout = srcImageLayout_;
16674  return *this;
16675  }
16676 
16678  {
16679  dstBuffer = dstBuffer_;
16680  return *this;
16681  }
16682 
16684  {
16685  regionCount = regionCount_;
16686  return *this;
16687  }
16688 
16690  {
16691  pRegions = pRegions_;
16692  return *this;
16693  }
16694 
16695 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
16698  {
16699  regionCount = static_cast<uint32_t>( regions_.size() );
16700  pRegions = regions_.data();
16701  return *this;
16702  }
16703 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16704 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16705 
16707  {
16708  return *reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
16709  }
16710 
16712  {
16713  return *reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
16714  }
16715 
16716 #if defined( VULKAN_HPP_USE_REFLECT )
16717 # if 14 <= VULKAN_HPP_CPP_VERSION
16718  auto
16719 # else
16720  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
16721  const void * const &,
16725  uint32_t const &,
16727 # endif
16728  reflect() const VULKAN_HPP_NOEXCEPT
16729  {
16730  return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
16731  }
16732 #endif
16733 
16734 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16735  auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
16736 #else
16738  {
16739 # if defined( VULKAN_HPP_USE_REFLECT )
16740  return this->reflect() == rhs.reflect();
16741 # else
16742  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
16743  ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
16744 # endif
16745  }
16746 
16748  {
16749  return !operator==( rhs );
16750  }
16751 #endif
16752 
16753  public:
16755  const void * pNext = {};
16759  uint32_t regionCount = {};
16761  };
16762 
16763  template <>
16765  {
16767  };
 16768  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
 16769 
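 // Usage sketch (not part of the generated header): reading mip level 0 of a color image
 // back into a tightly packed buffer, e.g. for a screenshot. Assumes `namespace vk =
 // VULKAN_HPP_NAMESPACE;` and that the image is already in eTransferSrcOptimal layout.
 inline void recordExampleImageReadback( vk::CommandBuffer cmd, vk::Image src, vk::Buffer dst, vk::Extent3D extent )
 {
   vk::BufferImageCopy2 region( 0, 0, 0,  // bufferOffset, bufferRowLength, bufferImageHeight (0 = tightly packed)
                                vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                                vk::Offset3D( 0, 0, 0 ),
                                extent );
   vk::CopyImageToBufferInfo2 copyInfo( src, vk::ImageLayout::eTransferSrcOptimal, dst, region );
   cmd.copyImageToBuffer2( copyInfo );
 }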
16771  {
16773 
16774 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16776  VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {},
16778  : srcAddress( srcAddress_ )
16779  , dstAddress( dstAddress_ )
16780  , size( size_ )
16781  {
16782  }
16783 
16785 
16787  : CopyMemoryIndirectCommandNV( *reinterpret_cast<CopyMemoryIndirectCommandNV const *>( &rhs ) )
16788  {
16789  }
16790 
16792 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16793 
16795  {
16796  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const *>( &rhs );
16797  return *this;
16798  }
16799 
16800 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16802  {
16803  srcAddress = srcAddress_;
16804  return *this;
16805  }
16806 
16808  {
16809  dstAddress = dstAddress_;
16810  return *this;
16811  }
16812 
16814  {
16815  size = size_;
16816  return *this;
16817  }
16818 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16819 
16821  {
16822  return *reinterpret_cast<const VkCopyMemoryIndirectCommandNV *>( this );
16823  }
16824 
16826  {
16827  return *reinterpret_cast<VkCopyMemoryIndirectCommandNV *>( this );
16828  }
16829 
16830 #if defined( VULKAN_HPP_USE_REFLECT )
16831 # if 14 <= VULKAN_HPP_CPP_VERSION
16832  auto
16833 # else
16834  std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
16835 # endif
16836  reflect() const VULKAN_HPP_NOEXCEPT
16837  {
16838  return std::tie( srcAddress, dstAddress, size );
16839  }
16840 #endif
16841 
16842 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
16843  auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default;
16844 #else
16846  {
16847 # if defined( VULKAN_HPP_USE_REFLECT )
16848  return this->reflect() == rhs.reflect();
16849 # else
16850  return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size );
16851 # endif
16852  }
16853 
16855  {
16856  return !operator==( rhs );
16857  }
16858 #endif
16859 
16860  public:
16864  };
16865 
16867  {
16869 
16870  static const bool allowDuplicate = false;
16872 
16873 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16878  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
16879  : pNext( pNext_ )
16880  , src( src_ )
16881  , dst( dst_ )
16882  , mode( mode_ )
16883  {
16884  }
16885 
16887 
16889  : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
16890  {
16891  }
16892 
16894 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16895 
16897  {
16898  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
16899  return *this;
16900  }
16901 
16902 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
16904  {
16905  pNext = pNext_;
16906  return *this;
16907  }
16908 
16911  {
16912  src = src_;
16913  return *this;
16914  }
16915 
16917  {
16918  dst = dst_;
16919  return *this;
16920  }
16921 
16924  {
16925  mode = mode_;
16926  return *this;
16927  }
16928 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
16929 
16931  {
16932  return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
16933  }
16934 
16936  {
16937  return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
16938  }
16939 
16940 #if defined( VULKAN_HPP_USE_REFLECT )
16941 # if 14 <= VULKAN_HPP_CPP_VERSION
16942  auto
16943 # else
16944  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
16945  const void * const &,
16949 # endif
16950  reflect() const VULKAN_HPP_NOEXCEPT
16951  {
16952  return std::tie( sType, pNext, src, dst, mode );
16953  }
16954 #endif
16955 
16956  public:
16958  const void * pNext = {};
16962  };
16963 
16964  template <>
16966  {
16968  };
16969 
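 // Usage sketch (not part of the generated header): deserializing a previously serialized
 // acceleration structure from a device-local buffer on the GPU timeline. Assumes
 // `namespace vk = VULKAN_HPP_NAMESPACE;` and VK_KHR_acceleration_structure.
 inline void recordExampleDeserialize( vk::CommandBuffer cmd, vk::DeviceAddress serializedData, vk::AccelerationStructureKHR dst )
 {
   vk::CopyMemoryToAccelerationStructureInfoKHR info( vk::DeviceOrHostAddressConstKHR( serializedData ),  // src
                                                      dst,                                                // dst
                                                      vk::CopyAccelerationStructureModeKHR::eDeserialize );
   cmd.copyMemoryToAccelerationStructureKHR( info );
 }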
16971  {
16973 
16974 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
16976  uint32_t bufferRowLength_ = {},
16977  uint32_t bufferImageHeight_ = {},
16978  VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
16979  VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
16981  : srcAddress( srcAddress_ )
16982  , bufferRowLength( bufferRowLength_ )
16983  , bufferImageHeight( bufferImageHeight_ )
16984  , imageSubresource( imageSubresource_ )
16985  , imageOffset( imageOffset_ )
16986  , imageExtent( imageExtent_ )
16987  {
16988  }
16989 
16991 
16993  : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast<CopyMemoryToImageIndirectCommandNV const *>( &rhs ) )
16994  {
16995  }
16996 
16998 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
16999 
17001  {
17002  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const *>( &rhs );
17003  return *this;
17004  }
17005 
17006 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17008  {
17009  srcAddress = srcAddress_;
17010  return *this;
17011  }
17012 
17014  {
17015  bufferRowLength = bufferRowLength_;
17016  return *this;
17017  }
17018 
17020  {
17021  bufferImageHeight = bufferImageHeight_;
17022  return *this;
17023  }
17024 
17027  {
17028  imageSubresource = imageSubresource_;
17029  return *this;
17030  }
17031 
17033  {
17034  imageOffset = imageOffset_;
17035  return *this;
17036  }
17037 
17039  {
17040  imageExtent = imageExtent_;
17041  return *this;
17042  }
17043 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17044 
17046  {
17047  return *reinterpret_cast<const VkCopyMemoryToImageIndirectCommandNV *>( this );
17048  }
17049 
17051  {
17052  return *reinterpret_cast<VkCopyMemoryToImageIndirectCommandNV *>( this );
17053  }
17054 
17055 #if defined( VULKAN_HPP_USE_REFLECT )
17056 # if 14 <= VULKAN_HPP_CPP_VERSION
17057  auto
17058 # else
17059  std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &,
17060  uint32_t const &,
17061  uint32_t const &,
17065 # endif
17066  reflect() const VULKAN_HPP_NOEXCEPT
17067  {
 17068  return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
 17069  }
17070 #endif
17071 
17072 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
17073  auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default;
17074 #else
17076  {
17077 # if defined( VULKAN_HPP_USE_REFLECT )
17078  return this->reflect() == rhs.reflect();
17079 # else
17080  return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
17081  ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
17082 # endif
17083  }
17084 
17086  {
17087  return !operator==( rhs );
17088  }
17089 #endif
17090 
17091  public:
17093  uint32_t bufferRowLength = {};
17094  uint32_t bufferImageHeight = {};
17098  };
17099 
17101  {
17103 
17104  static const bool allowDuplicate = false;
17106 
17107 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17111  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17112  : pNext( pNext_ )
17113  , src( src_ )
17114  , dst( dst_ )
17115  , mode( mode_ )
17116  {
17117  }
17118 
17120 
17122  : CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
17123  {
17124  }
17125 
17127 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17128 
17130  {
17131  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT const *>( &rhs );
17132  return *this;
17133  }
17134 
17135 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17137  {
17138  pNext = pNext_;
17139  return *this;
17140  }
17141 
17143  {
17144  src = src_;
17145  return *this;
17146  }
17147 
17149  {
17150  dst = dst_;
17151  return *this;
17152  }
17153 
17155  {
17156  mode = mode_;
17157  return *this;
17158  }
17159 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17160 
17162  {
17163  return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( this );
17164  }
17165 
17167  {
17168  return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT *>( this );
17169  }
17170 
17171 #if defined( VULKAN_HPP_USE_REFLECT )
17172 # if 14 <= VULKAN_HPP_CPP_VERSION
17173  auto
17174 # else
17175  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
17176  const void * const &,
17180 # endif
17181  reflect() const VULKAN_HPP_NOEXCEPT
17182  {
17183  return std::tie( sType, pNext, src, dst, mode );
17184  }
17185 #endif
17186 
17187  public:
17189  const void * pNext = {};
17193  };
17194 
17195  template <>
17197  {
17199  };
17200 
17202  {
17204 
17205  static const bool allowDuplicate = false;
17207 
17208 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17212  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17213  : pNext( pNext_ )
17214  , src( src_ )
17215  , dst( dst_ )
17216  , mode( mode_ )
17217  {
17218  }
17219 
17221 
17222  CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
17223  {
17224  }
17225 
17227 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17228 
17230  {
17231  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
17232  return *this;
17233  }
17234 
17235 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17237  {
17238  pNext = pNext_;
17239  return *this;
17240  }
17241 
17243  {
17244  src = src_;
17245  return *this;
17246  }
17247 
17249  {
17250  dst = dst_;
17251  return *this;
17252  }
17253 
17255  {
17256  mode = mode_;
17257  return *this;
17258  }
17259 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17260 
17262  {
17263  return *reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
17264  }
17265 
17267  {
17268  return *reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
17269  }
17270 
17271 #if defined( VULKAN_HPP_USE_REFLECT )
17272 # if 14 <= VULKAN_HPP_CPP_VERSION
17273  auto
17274 # else
17275  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
17276  const void * const &,
17280 # endif
17281  reflect() const VULKAN_HPP_NOEXCEPT
17282  {
17283  return std::tie( sType, pNext, src, dst, mode );
17284  }
17285 #endif
17286 
17287 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
17288  auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
17289 #else
17291  {
17292 # if defined( VULKAN_HPP_USE_REFLECT )
17293  return this->reflect() == rhs.reflect();
17294 # else
17295  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
17296 # endif
17297  }
17298 
17300  {
17301  return !operator==( rhs );
17302  }
17303 #endif
17304 
17305  public:
17307  const void * pNext = {};
17311  };
17312 
17313  template <>
17315  {
17317  };
17318 
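 // Usage sketch (not part of the generated header): cloning an opacity micromap on the GPU
 // timeline. Assumes `namespace vk = VULKAN_HPP_NAMESPACE;` and VK_EXT_opacity_micromap
 // with its entry points loaded in the dispatcher.
 inline void recordExampleMicromapClone( vk::CommandBuffer cmd, vk::MicromapEXT src, vk::MicromapEXT dst )
 {
   vk::CopyMicromapInfoEXT info( src, dst, vk::CopyMicromapModeEXT::eClone );
   cmd.copyMicromapEXT( info );  // dst must have been created large enough to hold the clone
 }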
17320  {
17322 
17323  static const bool allowDuplicate = false;
17325 
17326 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17330  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17331  : pNext( pNext_ )
17332  , src( src_ )
17333  , dst( dst_ )
17334  , mode( mode_ )
17335  {
17336  }
17337 
17339 
17341  : CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
17342  {
17343  }
17344 
17346 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17347 
17349  {
17350  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT const *>( &rhs );
17351  return *this;
17352  }
17353 
17354 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17356  {
17357  pNext = pNext_;
17358  return *this;
17359  }
17360 
17362  {
17363  src = src_;
17364  return *this;
17365  }
17366 
17368  {
17369  dst = dst_;
17370  return *this;
17371  }
17372 
17374  {
17375  mode = mode_;
17376  return *this;
17377  }
17378 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17379 
17381  {
17382  return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( this );
17383  }
17384 
17386  {
17387  return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT *>( this );
17388  }
17389 
17390 #if defined( VULKAN_HPP_USE_REFLECT )
17391 # if 14 <= VULKAN_HPP_CPP_VERSION
17392  auto
17393 # else
17394  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
17395  const void * const &,
17399 # endif
17400  reflect() const VULKAN_HPP_NOEXCEPT
17401  {
17402  return std::tie( sType, pNext, src, dst, mode );
17403  }
17404 #endif
17405 
17406  public:
17408  const void * pNext = {};
17412  };
17413 
17414  template <>
17416  {
17418  };
17419 
17421  {
17423 
17424  static const bool allowDuplicate = false;
17426 
17427 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17429  CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17430  : pNext( pNext_ )
17431  , module( module_ )
17432  , pName( pName_ )
17433  {
17434  }
17435 
17437 
17439  : CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
17440  {
17441  }
17442 
17444 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17445 
17447  {
17448  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
17449  return *this;
17450  }
17451 
17452 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17454  {
17455  pNext = pNext_;
17456  return *this;
17457  }
17458 
17460  {
17461  module = module_;
17462  return *this;
17463  }
17464 
17466  {
17467  pName = pName_;
17468  return *this;
17469  }
17470 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17471 
17473  {
17474  return *reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this );
17475  }
17476 
17478  {
17479  return *reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this );
17480  }
17481 
17482 #if defined( VULKAN_HPP_USE_REFLECT )
17483 # if 14 <= VULKAN_HPP_CPP_VERSION
17484  auto
17485 # else
17486  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
17487 # endif
17488  reflect() const VULKAN_HPP_NOEXCEPT
17489  {
17490  return std::tie( sType, pNext, module, pName );
17491  }
17492 #endif
17493 
17494 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
17495  std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
17496  {
17497  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
17498  return cmp;
17499  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
17500  return cmp;
17501  if ( auto cmp = module <=> rhs.module; cmp != 0 )
17502  return cmp;
17503  if ( pName != rhs.pName )
17504  if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
17505  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
17506 
17507  return std::strong_ordering::equivalent;
17508  }
17509 #endif
17510 
17512  {
17513  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
17514  }
17515 
17517  {
17518  return !operator==( rhs );
17519  }
17520 
17521  public:
17523  const void * pNext = {};
17525  const char * pName = {};
17526  };
17527 
17528  template <>
17530  {
17532  };
17533 
17535  {
17537 
17538  static const bool allowDuplicate = false;
17540 
17541 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17543  uint32_t gridDimX_ = {},
17544  uint32_t gridDimY_ = {},
17545  uint32_t gridDimZ_ = {},
17546  uint32_t blockDimX_ = {},
17547  uint32_t blockDimY_ = {},
17548  uint32_t blockDimZ_ = {},
17549  uint32_t sharedMemBytes_ = {},
17550  size_t paramCount_ = {},
17551  const void * const * pParams_ = {},
17552  size_t extraCount_ = {},
17553  const void * const * pExtras_ = {},
17554  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17555  : pNext( pNext_ )
17556  , function( function_ )
17557  , gridDimX( gridDimX_ )
17558  , gridDimY( gridDimY_ )
17559  , gridDimZ( gridDimZ_ )
17560  , blockDimX( blockDimX_ )
17561  , blockDimY( blockDimY_ )
17562  , blockDimZ( blockDimZ_ )
17563  , sharedMemBytes( sharedMemBytes_ )
17564  , paramCount( paramCount_ )
17565  , pParams( pParams_ )
17566  , extraCount( extraCount_ )
17567  , pExtras( pExtras_ )
17568  {
17569  }
17570 
17572 
17573  CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) ) {}
17574 
17575 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17577  uint32_t gridDimX_,
17578  uint32_t gridDimY_,
17579  uint32_t gridDimZ_,
17580  uint32_t blockDimX_,
17581  uint32_t blockDimY_,
17582  uint32_t blockDimZ_,
17583  uint32_t sharedMemBytes_,
17586  const void * pNext_ = nullptr )
17587  : pNext( pNext_ )
17588  , function( function_ )
17589  , gridDimX( gridDimX_ )
17590  , gridDimY( gridDimY_ )
17591  , gridDimZ( gridDimZ_ )
17592  , blockDimX( blockDimX_ )
17593  , blockDimY( blockDimY_ )
17594  , blockDimZ( blockDimZ_ )
17595  , sharedMemBytes( sharedMemBytes_ )
17596  , paramCount( params_.size() )
17597  , pParams( params_.data() )
17598  , extraCount( extras_.size() )
17599  , pExtras( extras_.data() )
17600  {
17601  }
17602 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17603 
17604  CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
17605 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17606 
17608  {
17609  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const *>( &rhs );
17610  return *this;
17611  }
17612 
17613 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17615  {
17616  pNext = pNext_;
17617  return *this;
17618  }
17619 
17621  {
17622  function = function_;
17623  return *this;
17624  }
17625 
17627  {
17628  gridDimX = gridDimX_;
17629  return *this;
17630  }
17631 
17633  {
17634  gridDimY = gridDimY_;
17635  return *this;
17636  }
17637 
17639  {
17640  gridDimZ = gridDimZ_;
17641  return *this;
17642  }
17643 
17645  {
17646  blockDimX = blockDimX_;
17647  return *this;
17648  }
17649 
17651  {
17652  blockDimY = blockDimY_;
17653  return *this;
17654  }
17655 
17657  {
17658  blockDimZ = blockDimZ_;
17659  return *this;
17660  }
17661 
17663  {
17664  sharedMemBytes = sharedMemBytes_;
17665  return *this;
17666  }
17667 
17669  {
17670  paramCount = paramCount_;
17671  return *this;
17672  }
17673 
17675  {
17676  pParams = pParams_;
17677  return *this;
17678  }
17679 
17680 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17682  {
17683  paramCount = params_.size();
17684  pParams = params_.data();
17685  return *this;
17686  }
17687 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17688 
17690  {
17691  extraCount = extraCount_;
17692  return *this;
17693  }
17694 
17696  {
17697  pExtras = pExtras_;
17698  return *this;
17699  }
17700 
17701 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17703  {
17704  extraCount = extras_.size();
17705  pExtras = extras_.data();
17706  return *this;
17707  }
17708 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17709 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17710 
17711  operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
17712  {
17713  return *reinterpret_cast<const VkCuLaunchInfoNVX *>( this );
17714  }
17715 
17717  {
17718  return *reinterpret_cast<VkCuLaunchInfoNVX *>( this );
17719  }
17720 
17721 #if defined( VULKAN_HPP_USE_REFLECT )
17722 # if 14 <= VULKAN_HPP_CPP_VERSION
17723  auto
17724 # else
17725  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
17726  const void * const &,
17728  uint32_t const &,
17729  uint32_t const &,
17730  uint32_t const &,
17731  uint32_t const &,
17732  uint32_t const &,
17733  uint32_t const &,
17734  uint32_t const &,
17735  size_t const &,
17736  const void * const * const &,
17737  size_t const &,
17738  const void * const * const &>
17739 # endif
17740  reflect() const VULKAN_HPP_NOEXCEPT
17741  {
17742  return std::tie(
17743  sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
17744  }
17745 #endif
17746 
17747 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
17748  auto operator<=>( CuLaunchInfoNVX const & ) const = default;
17749 #else
17751  {
17752 # if defined( VULKAN_HPP_USE_REFLECT )
17753  return this->reflect() == rhs.reflect();
17754 # else
17755  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) &&
17756  ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
17757  ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) &&
17758  ( pExtras == rhs.pExtras );
17759 # endif
17760  }
17761 
17763  {
17764  return !operator==( rhs );
17765  }
17766 #endif
17767 
17768  public:
17770  const void * pNext = {};
17772  uint32_t gridDimX = {};
17773  uint32_t gridDimY = {};
17774  uint32_t gridDimZ = {};
17775  uint32_t blockDimX = {};
17776  uint32_t blockDimY = {};
17777  uint32_t blockDimZ = {};
17778  uint32_t sharedMemBytes = {};
17779  size_t paramCount = {};
17780  const void * const * pParams = {};
17781  size_t extraCount = {};
17782  const void * const * pExtras = {};
17783  };
17784 
17785  template <>
17787  {
17789  };
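  // Editorial usage sketch (not part of the generated header): populating a CuLaunchInfoNVX with
  // the chained setters, assuming a VULKAN_HPP_NAMESPACE::CuFunctionNVX handle 'cuFunction'
  // (hypothetical); arg0 and arg1 stand in for the kernel's arguments:
  //
  //   float   arg0 = 1.0f;
  //   int32_t arg1 = 16;
  //   std::array<const void *, 2> kernelParams = { &arg0, &arg1 };
  //   VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX launchInfo =
  //     VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX{}
  //       .setFunction( cuFunction )
  //       .setGridDimX( 64 ).setGridDimY( 1 ).setGridDimZ( 1 )
  //       .setBlockDimX( 256 ).setBlockDimY( 1 ).setBlockDimZ( 1 )
  //       .setParams( kernelParams );   // enhanced-mode setter fills paramCount and pParams together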
17790 
17791  struct CuModuleCreateInfoNVX
17792  {
17794 
17795  static const bool allowDuplicate = false;
17797 
17798 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17799  VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17800  : pNext( pNext_ )
17801  , dataSize( dataSize_ )
17802  , pData( pData_ )
17803  {
17804  }
17805 
17807 
17809  : CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
17810  {
17811  }
17812 
17813 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17814  template <typename T>
17816  : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
17817  {
17818  }
17819 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17820 
17822 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17823 
17825  {
17826  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
17827  return *this;
17828  }
17829 
17830 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17832  {
17833  pNext = pNext_;
17834  return *this;
17835  }
17836 
17838  {
17839  dataSize = dataSize_;
17840  return *this;
17841  }
17842 
17844  {
17845  pData = pData_;
17846  return *this;
17847  }
17848 
17849 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17850  template <typename T>
17852  {
17853  dataSize = data_.size() * sizeof( T );
17854  pData = data_.data();
17855  return *this;
17856  }
17857 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17858 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
17859 
17861  {
17862  return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
17863  }
17864 
17866  {
17867  return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
17868  }
17869 
17870 #if defined( VULKAN_HPP_USE_REFLECT )
17871 # if 14 <= VULKAN_HPP_CPP_VERSION
17872  auto
17873 # else
17874  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
17875 # endif
17876  reflect() const VULKAN_HPP_NOEXCEPT
17877  {
17878  return std::tie( sType, pNext, dataSize, pData );
17879  }
17880 #endif
17881 
17882 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
17883  auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
17884 #else
17886  {
17887 # if defined( VULKAN_HPP_USE_REFLECT )
17888  return this->reflect() == rhs.reflect();
17889 # else
17890  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
17891 # endif
17892  }
17893 
17895  {
17896  return !operator==( rhs );
17897  }
17898 #endif
17899 
17900  public:
17902  const void * pNext = {};
17903  size_t dataSize = {};
17904  const void * pData = {};
17905  };
17906 
17907  template <>
17909  {
17911  };
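  // Editorial usage sketch (not part of the generated header): filling dataSize and pData from a
  // byte buffer holding the compiled CUDA module image; 'loadCompiledCudaModule' is a hypothetical
  // helper used only for illustration:
  //
  //   std::vector<uint8_t> cubinBlob = loadCompiledCudaModule();
  //   VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX moduleCreateInfo( cubinBlob.size(), cubinBlob.data() );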
17912 
17913 #if defined( VK_USE_PLATFORM_WIN32_KHR )
17914  struct D3D12FenceSubmitInfoKHR
17915  {
17916  using NativeType = VkD3D12FenceSubmitInfoKHR;
17917 
17918  static const bool allowDuplicate = false;
17919  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
17920 
17921 # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
17922  VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
17923  const uint64_t * pWaitSemaphoreValues_ = {},
17924  uint32_t signalSemaphoreValuesCount_ = {},
17925  const uint64_t * pSignalSemaphoreValues_ = {},
17926  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
17927  : pNext( pNext_ )
17928  , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
17929  , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
17930  , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
17931  , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
17932  {
17933  }
17934 
17935  VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
17936 
17937  D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
17938  : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
17939  {
17940  }
17941 
17942 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17943  D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
17944  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
17945  const void * pNext_ = nullptr )
17946  : pNext( pNext_ )
17947  , waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
17948  , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
17949  , signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
17950  , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
17951  {
17952  }
17953 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17954 
17955  D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
17956 # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
17957 
17958  D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
17959  {
17960  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
17961  return *this;
17962  }
17963 
17964 # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
17965  VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
17966  {
17967  pNext = pNext_;
17968  return *this;
17969  }
17970 
17971  VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
17972  {
17973  waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
17974  return *this;
17975  }
17976 
17977  VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
17978  {
17979  pWaitSemaphoreValues = pWaitSemaphoreValues_;
17980  return *this;
17981  }
17982 
17983 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
17984  D3D12FenceSubmitInfoKHR &
17985  setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
17986  {
17987  waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
17988  pWaitSemaphoreValues = waitSemaphoreValues_.data();
17989  return *this;
17990  }
17991 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17992 
17993  VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
17994  {
17995  signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
17996  return *this;
17997  }
17998 
17999  VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
18000  {
18001  pSignalSemaphoreValues = pSignalSemaphoreValues_;
18002  return *this;
18003  }
18004 
18005 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18006  D3D12FenceSubmitInfoKHR &
18007  setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
18008  {
18009  signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
18010  pSignalSemaphoreValues = signalSemaphoreValues_.data();
18011  return *this;
18012  }
18013 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18014 # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18015 
18016  operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
18017  {
18018  return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
18019  }
18020 
18022  {
18023  return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
18024  }
18025 
18026 # if defined( VULKAN_HPP_USE_REFLECT )
18027 # if 14 <= VULKAN_HPP_CPP_VERSION
18028  auto
18029 # else
18030  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
18031  const void * const &,
18032  uint32_t const &,
18033  const uint64_t * const &,
18034  uint32_t const &,
18035  const uint64_t * const &>
18036 # endif
18037  reflect() const VULKAN_HPP_NOEXCEPT
18038  {
18039  return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
18040  }
18041 # endif
18042 
18043 # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18044  auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
18045 # else
18046  bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
18047  {
18048 # if defined( VULKAN_HPP_USE_REFLECT )
18049  return this->reflect() == rhs.reflect();
18050 # else
18051  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
18052  ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
18053  ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
18054 # endif
18055  }
18056 
18057  bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
18058  {
18059  return !operator==( rhs );
18060  }
18061 # endif
18062 
18063  public:
18064  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
18065  const void * pNext = {};
18066  uint32_t waitSemaphoreValuesCount = {};
18067  const uint64_t * pWaitSemaphoreValues = {};
18068  uint32_t signalSemaphoreValuesCount = {};
18069  const uint64_t * pSignalSemaphoreValues = {};
18070  };
18071 
18072  template <>
18073  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
18074  {
18075  using Type = D3D12FenceSubmitInfoKHR;
18076  };
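  // Editorial usage sketch (not part of the generated header): D3D12FenceSubmitInfoKHR is chained
  // into the pNext of a queue submission to supply D3D12 fence values; 'submitInfo' stands for a
  // VULKAN_HPP_NAMESPACE::SubmitInfo being assembled elsewhere (hypothetical):
  //
  //   const uint64_t waitValues[]   = { 1 };
  //   const uint64_t signalValues[] = { 2 };
  //   VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR d3d12FenceInfo( waitValues, signalValues );
  //   submitInfo.setPNext( &d3d12FenceInfo );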
18077 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
18078 
18079  struct DebugMarkerMarkerInfoEXT
18080  {
18082 
18083  static const bool allowDuplicate = false;
18085 
18086 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18088  DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18089  : pNext( pNext_ )
18090  , pMarkerName( pMarkerName_ )
18091  , color( color_ )
18092  {
18093  }
18094 
18096 
18098  : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
18099  {
18100  }
18101 
18103 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18104 
18106  {
18107  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
18108  return *this;
18109  }
18110 
18111 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18113  {
18114  pNext = pNext_;
18115  return *this;
18116  }
18117 
18119  {
18120  pMarkerName = pMarkerName_;
18121  return *this;
18122  }
18123 
18125  {
18126  color = color_;
18127  return *this;
18128  }
18129 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18130 
18132  {
18133  return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
18134  }
18135 
18137  {
18138  return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
18139  }
18140 
18141 #if defined( VULKAN_HPP_USE_REFLECT )
18142 # if 14 <= VULKAN_HPP_CPP_VERSION
18143  auto
18144 # else
18145  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
18146 # endif
18147  reflect() const VULKAN_HPP_NOEXCEPT
18148  {
18149  return std::tie( sType, pNext, pMarkerName, color );
18150  }
18151 #endif
18152 
18153 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18154  std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
18155  {
18156  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
18157  return cmp;
18158  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
18159  return cmp;
18160  if ( pMarkerName != rhs.pMarkerName )
18161  if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
18162  return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
18163  if ( auto cmp = color <=> rhs.color; cmp != 0 )
18164  return cmp;
18165 
18166  return std::partial_ordering::equivalent;
18167  }
18168 #endif
18169 
18171  {
18172  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) &&
18173  ( color == rhs.color );
18174  }
18175 
18177  {
18178  return !operator==( rhs );
18179  }
18180 
18181  public:
18183  const void * pNext = {};
18184  const char * pMarkerName = {};
18186  };
18187 
18188  template <>
18190  {
18192  };
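  // Editorial usage sketch (not part of the generated header): opening and closing a debug marker
  // region with this struct, assuming a VULKAN_HPP_NAMESPACE::CommandBuffer 'commandBuffer'
  // recorded on a device with VK_EXT_debug_marker enabled (hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT markerInfo( "Shadow pass", { { 1.0f, 0.0f, 0.0f, 1.0f } } );
  //   commandBuffer.debugMarkerBeginEXT( markerInfo );
  //   // ... record the pass ...
  //   commandBuffer.debugMarkerEndEXT();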
18193 
18194  struct DebugMarkerObjectNameInfoEXT
18195  {
18197 
18198  static const bool allowDuplicate = false;
18200 
18201 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18204  uint64_t object_ = {},
18205  const char * pObjectName_ = {},
18206  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18207  : pNext( pNext_ )
18208  , objectType( objectType_ )
18209  , object( object_ )
18210  , pObjectName( pObjectName_ )
18211  {
18212  }
18213 
18215 
18217  : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
18218  {
18219  }
18220 
18222 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18223 
18225  {
18226  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
18227  return *this;
18228  }
18229 
18230 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18232  {
18233  pNext = pNext_;
18234  return *this;
18235  }
18236 
18238  {
18239  objectType = objectType_;
18240  return *this;
18241  }
18242 
18244  {
18245  object = object_;
18246  return *this;
18247  }
18248 
18250  {
18251  pObjectName = pObjectName_;
18252  return *this;
18253  }
18254 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18255 
18257  {
18258  return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
18259  }
18260 
18262  {
18263  return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
18264  }
18265 
18266 #if defined( VULKAN_HPP_USE_REFLECT )
18267 # if 14 <= VULKAN_HPP_CPP_VERSION
18268  auto
18269 # else
18270  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
18271  const void * const &,
18273  uint64_t const &,
18274  const char * const &>
18275 # endif
18276  reflect() const VULKAN_HPP_NOEXCEPT
18277  {
18278  return std::tie( sType, pNext, objectType, object, pObjectName );
18279  }
18280 #endif
18281 
18282 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18283  std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
18284  {
18285  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
18286  return cmp;
18287  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
18288  return cmp;
18289  if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
18290  return cmp;
18291  if ( auto cmp = object <=> rhs.object; cmp != 0 )
18292  return cmp;
18293  if ( pObjectName != rhs.pObjectName )
18294  if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
18295  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
18296 
18297  return std::strong_ordering::equivalent;
18298  }
18299 #endif
18300 
18302  {
18303  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) &&
18304  ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
18305  }
18306 
18308  {
18309  return !operator==( rhs );
18310  }
18311 
18312  public:
18314  const void * pNext = {};
18316  uint64_t object = {};
18317  const char * pObjectName = {};
18318  };
18319 
18320  template <>
18322  {
18324  };
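  // Editorial usage sketch (not part of the generated header): naming an object through
  // VK_EXT_debug_marker, where 'rawBufferHandle' is the uint64_t value of the VkBuffer being
  // labelled and 'device' is a VULKAN_HPP_NAMESPACE::Device (both hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT nameInfo(
  //     VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer, rawBufferHandle, "vertexBuffer" );
  //   device.debugMarkerSetObjectNameEXT( nameInfo );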
18325 
18326  struct DebugMarkerObjectTagInfoEXT
18327  {
18329 
18330  static const bool allowDuplicate = false;
18332 
18333 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18336  uint64_t object_ = {},
18337  uint64_t tagName_ = {},
18338  size_t tagSize_ = {},
18339  const void * pTag_ = {},
18340  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18341  : pNext( pNext_ )
18342  , objectType( objectType_ )
18343  , object( object_ )
18344  , tagName( tagName_ )
18345  , tagSize( tagSize_ )
18346  , pTag( pTag_ )
18347  {
18348  }
18349 
18351 
18353  : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
18354  {
18355  }
18356 
18357 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18358  template <typename T>
18360  uint64_t object_,
18361  uint64_t tagName_,
18363  const void * pNext_ = nullptr )
18364  : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() )
18365  {
18366  }
18367 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18368 
18370 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18371 
18373  {
18374  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
18375  return *this;
18376  }
18377 
18378 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18380  {
18381  pNext = pNext_;
18382  return *this;
18383  }
18384 
18386  {
18387  objectType = objectType_;
18388  return *this;
18389  }
18390 
18392  {
18393  object = object_;
18394  return *this;
18395  }
18396 
18398  {
18399  tagName = tagName_;
18400  return *this;
18401  }
18402 
18404  {
18405  tagSize = tagSize_;
18406  return *this;
18407  }
18408 
18410  {
18411  pTag = pTag_;
18412  return *this;
18413  }
18414 
18415 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18416  template <typename T>
18418  {
18419  tagSize = tag_.size() * sizeof( T );
18420  pTag = tag_.data();
18421  return *this;
18422  }
18423 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18424 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18425 
18427  {
18428  return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
18429  }
18430 
18432  {
18433  return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
18434  }
18435 
18436 #if defined( VULKAN_HPP_USE_REFLECT )
18437 # if 14 <= VULKAN_HPP_CPP_VERSION
18438  auto
18439 # else
18440  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
18441  const void * const &,
18443  uint64_t const &,
18444  uint64_t const &,
18445  size_t const &,
18446  const void * const &>
18447 # endif
18448  reflect() const VULKAN_HPP_NOEXCEPT
18449  {
18450  return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
18451  }
18452 #endif
18453 
18454 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18455  auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
18456 #else
18458  {
18459 # if defined( VULKAN_HPP_USE_REFLECT )
18460  return this->reflect() == rhs.reflect();
18461 # else
18462  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) &&
18463  ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
18464 # endif
18465  }
18466 
18468  {
18469  return !operator==( rhs );
18470  }
18471 #endif
18472 
18473  public:
18475  const void * pNext = {};
18477  uint64_t object = {};
18478  uint64_t tagName = {};
18479  size_t tagSize = {};
18480  const void * pTag = {};
18481  };
18482 
18483  template <>
18485  {
18487  };
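  // Editorial usage sketch (not part of the generated header): attaching a small binary tag to an
  // object via VK_EXT_debug_marker; 'rawImageHandle' and 'device' are hypothetical, and the tag
  // name 1 is an arbitrary application-chosen identifier:
  //
  //   const uint32_t tagData[] = { 42u };
  //   VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT tagInfo(
  //     VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage, rawImageHandle,
  //     /*tagName*/ 1, sizeof( tagData ), tagData );
  //   device.debugMarkerSetObjectTagEXT( tagInfo );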
18488 
18489  struct DebugReportCallbackCreateInfoEXT
18490  {
18492 
18493  static const bool allowDuplicate = false;
18495 
18496 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18498  PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
18499  void * pUserData_ = {},
18500  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18501  : pNext( pNext_ )
18502  , flags( flags_ )
18503  , pfnCallback( pfnCallback_ )
18504  , pUserData( pUserData_ )
18505  {
18506  }
18507 
18509 
18511  : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
18512  {
18513  }
18514 
18516 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18517 
18519  {
18520  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
18521  return *this;
18522  }
18523 
18524 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18526  {
18527  pNext = pNext_;
18528  return *this;
18529  }
18530 
18532  {
18533  flags = flags_;
18534  return *this;
18535  }
18536 
18538  {
18539  pfnCallback = pfnCallback_;
18540  return *this;
18541  }
18542 
18544  {
18545  pUserData = pUserData_;
18546  return *this;
18547  }
18548 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18549 
18551  {
18552  return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
18553  }
18554 
18556  {
18557  return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
18558  }
18559 
18560 #if defined( VULKAN_HPP_USE_REFLECT )
18561 # if 14 <= VULKAN_HPP_CPP_VERSION
18562  auto
18563 # else
18564  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
18565  const void * const &,
18568  void * const &>
18569 # endif
18570  reflect() const VULKAN_HPP_NOEXCEPT
18571  {
18572  return std::tie( sType, pNext, flags, pfnCallback, pUserData );
18573  }
18574 #endif
18575 
18577  {
18578 #if defined( VULKAN_HPP_USE_REFLECT )
18579  return this->reflect() == rhs.reflect();
18580 #else
18581  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
18582 #endif
18583  }
18584 
18586  {
18587  return !operator==( rhs );
18588  }
18589 
18590  public:
18592  const void * pNext = {};
18595  void * pUserData = {};
18596  };
18597 
18598  template <>
18600  {
18602  };
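  // Editorial usage sketch (not part of the generated header): creating a debug report callback,
  // where 'myDebugReportCallback' is a function with the PFN_vkDebugReportCallbackEXT signature and
  // 'instance' is a VULKAN_HPP_NAMESPACE::Instance created with VK_EXT_debug_report (both hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT createInfo(
  //     VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eError | VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eWarning,
  //     &myDebugReportCallback );
  //   VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( createInfo );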
18603 
18604  struct DebugUtilsLabelEXT
18605  {
18607 
18608  static const bool allowDuplicate = false;
18610 
18611 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18613  DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18614  : pNext( pNext_ )
18615  , pLabelName( pLabelName_ )
18616  , color( color_ )
18617  {
18618  }
18619 
18621 
18622  DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) ) {}
18623 
18625 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18626 
18628  {
18629  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
18630  return *this;
18631  }
18632 
18633 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18635  {
18636  pNext = pNext_;
18637  return *this;
18638  }
18639 
18641  {
18642  pLabelName = pLabelName_;
18643  return *this;
18644  }
18645 
18647  {
18648  color = color_;
18649  return *this;
18650  }
18651 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18652 
18654  {
18655  return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
18656  }
18657 
18659  {
18660  return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
18661  }
18662 
18663 #if defined( VULKAN_HPP_USE_REFLECT )
18664 # if 14 <= VULKAN_HPP_CPP_VERSION
18665  auto
18666 # else
18667  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
18668 # endif
18669  reflect() const VULKAN_HPP_NOEXCEPT
18670  {
18671  return std::tie( sType, pNext, pLabelName, color );
18672  }
18673 #endif
18674 
18675 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18676  std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
18677  {
18678  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
18679  return cmp;
18680  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
18681  return cmp;
18682  if ( pLabelName != rhs.pLabelName )
18683  if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
18684  return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
18685  if ( auto cmp = color <=> rhs.color; cmp != 0 )
18686  return cmp;
18687 
18688  return std::partial_ordering::equivalent;
18689  }
18690 #endif
18691 
18693  {
18694  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) &&
18695  ( color == rhs.color );
18696  }
18697 
18699  {
18700  return !operator==( rhs );
18701  }
18702 
18703  public:
18705  const void * pNext = {};
18706  const char * pLabelName = {};
18708  };
18709 
18710  template <>
18712  {
18714  };
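  // Editorial usage sketch (not part of the generated header): labelling a region of work through
  // VK_EXT_debug_utils, assuming a VULKAN_HPP_NAMESPACE::CommandBuffer 'commandBuffer' (hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT label( "GBuffer pass", { { 0.0f, 1.0f, 0.0f, 1.0f } } );
  //   commandBuffer.beginDebugUtilsLabelEXT( label );
  //   // ... record the pass ...
  //   commandBuffer.endDebugUtilsLabelEXT();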
18715 
18716  struct DebugUtilsObjectNameInfoEXT
18717  {
18719 
18720  static const bool allowDuplicate = false;
18722 
18723 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18725  uint64_t objectHandle_ = {},
18726  const char * pObjectName_ = {},
18727  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18728  : pNext( pNext_ )
18729  , objectType( objectType_ )
18730  , objectHandle( objectHandle_ )
18731  , pObjectName( pObjectName_ )
18732  {
18733  }
18734 
18736 
18738  : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
18739  {
18740  }
18741 
18743 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18744 
18746  {
18747  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
18748  return *this;
18749  }
18750 
18751 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18753  {
18754  pNext = pNext_;
18755  return *this;
18756  }
18757 
18759  {
18760  objectType = objectType_;
18761  return *this;
18762  }
18763 
18765  {
18766  objectHandle = objectHandle_;
18767  return *this;
18768  }
18769 
18771  {
18772  pObjectName = pObjectName_;
18773  return *this;
18774  }
18775 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
18776 
18778  {
18779  return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
18780  }
18781 
18783  {
18784  return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
18785  }
18786 
18787 #if defined( VULKAN_HPP_USE_REFLECT )
18788 # if 14 <= VULKAN_HPP_CPP_VERSION
18789  auto
18790 # else
18791  std::
18792  tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
18793 # endif
18794  reflect() const VULKAN_HPP_NOEXCEPT
18795  {
18796  return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
18797  }
18798 #endif
18799 
18800 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
18801  std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
18802  {
18803  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
18804  return cmp;
18805  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
18806  return cmp;
18807  if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
18808  return cmp;
18809  if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 )
18810  return cmp;
18811  if ( pObjectName != rhs.pObjectName )
18812  if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
18813  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
18814 
18815  return std::strong_ordering::equivalent;
18816  }
18817 #endif
18818 
18820  {
18821  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
18822  ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
18823  }
18824 
18826  {
18827  return !operator==( rhs );
18828  }
18829 
18830  public:
18832  const void * pNext = {};
18834  uint64_t objectHandle = {};
18835  const char * pObjectName = {};
18836  };
18837 
18838  template <>
18840  {
18842  };
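  // Editorial usage sketch (not part of the generated header): giving a handle a readable name via
  // VK_EXT_debug_utils; 'rawImageHandle' is the uint64_t value of the VkImage being named and
  // 'device' is a VULKAN_HPP_NAMESPACE::Device (both hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT nameInfo(
  //     VULKAN_HPP_NAMESPACE::ObjectType::eImage, rawImageHandle, "gbuffer.albedo" );
  //   device.setDebugUtilsObjectNameEXT( nameInfo );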
18843 
18844  struct DebugUtilsMessengerCallbackDataEXT
18845  {
18847 
18848  static const bool allowDuplicate = false;
18850 
18851 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
18853  const char * pMessageIdName_ = {},
18854  int32_t messageIdNumber_ = {},
18855  const char * pMessage_ = {},
18856  uint32_t queueLabelCount_ = {},
18857  const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {},
18858  uint32_t cmdBufLabelCount_ = {},
18859  const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {},
18860  uint32_t objectCount_ = {},
18862  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
18863  : pNext( pNext_ )
18864  , flags( flags_ )
18865  , pMessageIdName( pMessageIdName_ )
18866  , messageIdNumber( messageIdNumber_ )
18867  , pMessage( pMessage_ )
18868  , queueLabelCount( queueLabelCount_ )
18869  , pQueueLabels( pQueueLabels_ )
18870  , cmdBufLabelCount( cmdBufLabelCount_ )
18871  , pCmdBufLabels( pCmdBufLabels_ )
18872  , objectCount( objectCount_ )
18873  , pObjects( pObjects_ )
18874  {
18875  }
18876 
18878 
18880  : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
18881  {
18882  }
18883 
18884 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18887  const char * pMessageIdName_,
18888  int32_t messageIdNumber_,
18889  const char * pMessage_,
18893  const void * pNext_ = nullptr )
18894  : pNext( pNext_ )
18895  , flags( flags_ )
18896  , pMessageIdName( pMessageIdName_ )
18897  , messageIdNumber( messageIdNumber_ )
18898  , pMessage( pMessage_ )
18899  , queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) )
18900  , pQueueLabels( queueLabels_.data() )
18901  , cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) )
18902  , pCmdBufLabels( cmdBufLabels_.data() )
18903  , objectCount( static_cast<uint32_t>( objects_.size() ) )
18904  , pObjects( objects_.data() )
18905  {
18906  }
18907 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18908 
18910 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
18911 
18913  {
18914  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
18915  return *this;
18916  }
18917 
18918 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
18920  {
18921  pNext = pNext_;
18922  return *this;
18923  }
18924 
18927  {
18928  flags = flags_;
18929  return *this;
18930  }
18931 
18933  {
18934  pMessageIdName = pMessageIdName_;
18935  return *this;
18936  }
18937 
18939  {
18940  messageIdNumber = messageIdNumber_;
18941  return *this;
18942  }
18943 
18945  {
18946  pMessage = pMessage_;
18947  return *this;
18948  }
18949 
18951  {
18952  queueLabelCount = queueLabelCount_;
18953  return *this;
18954  }
18955 
18958  {
18959  pQueueLabels = pQueueLabels_;
18960  return *this;
18961  }
18962 
18963 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18966  {
18967  queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
18968  pQueueLabels = queueLabels_.data();
18969  return *this;
18970  }
18971 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18972 
18974  {
18975  cmdBufLabelCount = cmdBufLabelCount_;
18976  return *this;
18977  }
18978 
18981  {
18982  pCmdBufLabels = pCmdBufLabels_;
18983  return *this;
18984  }
18985 
18986 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
18989  {
18990  cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
18991  pCmdBufLabels = cmdBufLabels_.data();
18992  return *this;
18993  }
18994 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18995 
18997  {
18998  objectCount = objectCount_;
18999  return *this;
19000  }
19001 
19004  {
19005  pObjects = pObjects_;
19006  return *this;
19007  }
19008 
19009 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
19012  {
19013  objectCount = static_cast<uint32_t>( objects_.size() );
19014  pObjects = objects_.data();
19015  return *this;
19016  }
19017 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19018 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
19019 
19021  {
19022  return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
19023  }
19024 
19026  {
19027  return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
19028  }
19029 
19030 #if defined( VULKAN_HPP_USE_REFLECT )
19031 # if 14 <= VULKAN_HPP_CPP_VERSION
19032  auto
19033 # else
19034  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
19035  const void * const &,
19037  const char * const &,
19038  int32_t const &,
19039  const char * const &,
19040  uint32_t const &,
19042  uint32_t const &,
19044  uint32_t const &,
19046 # endif
19047  reflect() const VULKAN_HPP_NOEXCEPT
19048  {
19049  return std::tie(
19050  sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
19051  }
19052 #endif
19053 
19054 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
19055  std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
19056  {
19057  if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
19058  return cmp;
19059  if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
19060  return cmp;
19061  if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
19062  return cmp;
19063  if ( pMessageIdName != rhs.pMessageIdName )
19064  if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
19065  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
19066  if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 )
19067  return cmp;
19068  if ( pMessage != rhs.pMessage )
19069  if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
19070  return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
19071  if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 )
19072  return cmp;
19073  if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 )
19074  return cmp;
19075  if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 )
19076  return cmp;
19077  if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 )
19078  return cmp;
19079  if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 )
19080  return cmp;
19081  if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 )
19082  return cmp;
19083 
19084  return std::strong_ordering::equivalent;
19085  }
19086 #endif
19087 
19089  {
19090  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
19091  ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) &&
19092  ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) &&
19093  ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
19094  ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
19095  }
19096 
19098  {
19099  return !operator==( rhs );
19100  }
19101 
19102  public:
19104  const void * pNext = {};
19106  const char * pMessageIdName = {};
19107  int32_t messageIdNumber = {};
19108  const char * pMessage = {};
19109  uint32_t queueLabelCount = {};
19111  uint32_t cmdBufLabelCount = {};
19113  uint32_t objectCount = {};
19115  };
19116 
19117  template <>
19119  {
19121  };
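  // Editorial note (not part of the generated header): this struct is normally received, not built,
  // inside a PFN_vkDebugUtilsMessengerCallbackEXT callback; a hedged sketch of reading it there
  // (requires <iostream> for std::cerr):
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL myCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
  //                                              VkDebugUtilsMessageTypeFlagsEXT,
  //                                              const VkDebugUtilsMessengerCallbackDataEXT * pData,
  //                                              void * )
  //   {
  //     auto const & data = *reinterpret_cast<const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT *>( pData );
  //     std::cerr << data.pMessage << '\n';   // pMessageIdName, pObjects, pCmdBufLabels, ... are also available
  //     return VK_FALSE;
  //   }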
19122 
19123  struct DebugUtilsMessengerCreateInfoEXT
19124  {
19126 
19127  static const bool allowDuplicate = true;
19129 
19130 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
19134  PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
19135  void * pUserData_ = {},
19136  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
19137  : pNext( pNext_ )
19138  , flags( flags_ )
19139  , messageSeverity( messageSeverity_ )
19140  , messageType( messageType_ )
19141  , pfnUserCallback( pfnUserCallback_ )
19142  , pUserData( pUserData_ )
19143  {
19144  }
19145 
19147 
19149  : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
19150  {
19151  }
19152 
19154 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
19155 
19157  {
19158  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
19159  return *this;
19160  }
19161 
19162 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
19164  {
19165  pNext = pNext_;
19166  return *this;
19167  }
19168 
19170  {
19171  flags = flags_;
19172  return *this;
19173  }
19174 
19177  {
19178  messageSeverity = messageSeverity_;
19179  return *this;
19180  }
19181 
19184  {
19185  messageType = messageType_;
19186  return *this;
19187  }
19188 
19190  {
19191  pfnUserCallback = pfnUserCallback_;
19192  return *this;
19193  }
19194 
19196  {
19197  pUserData = pUserData_;
19198  return *this;
19199  }
19200 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
19201 
19203  {
19204  return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
19205  }
19206 
19208  {
19209  return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
19210  }
19211 
19212 #if defined( VULKAN_HPP_USE_REFLECT )
19213 # if 14 <= VULKAN_HPP_CPP_VERSION
19214  auto
19215 # else
19216  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
19217  const void * const &,
19222  void * const &>
19223 # endif
19224  reflect() const VULKAN_HPP_NOEXCEPT
19225  {
19226  return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
19227  }
19228 #endif
19229 
19231  {
19232 #if defined( VULKAN_HPP_USE_REFLECT )
19233  return this->reflect() == rhs.reflect();
19234 #else
19235  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) &&
19236  ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
19237 #endif
19238  }
19239 
19241  {
19242  return !operator==( rhs );
19243  }
19244 
19245  public:
19247  const void * pNext = {};
19252  void * pUserData = {};
19253  };
19254 
19255  template <>
19257  {
19259  };
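  // Editorial usage sketch (not part of the generated header): creating a debug utils messenger,
  // where 'myCallback' has the PFN_vkDebugUtilsMessengerCallbackEXT signature and 'instance' is a
  // VULKAN_HPP_NAMESPACE::Instance created with VK_EXT_debug_utils (both hypothetical):
  //
  //   using Severity = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT;
  //   using MsgType  = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT;
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT createInfo(
  //     {}, Severity::eWarning | Severity::eError, MsgType::eGeneral | MsgType::eValidation, &myCallback );
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( createInfo );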
19260 
19261  struct DebugUtilsObjectTagInfoEXT
19262  {
19264 
19265  static const bool allowDuplicate = false;
19267 
19268 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
19270  uint64_t objectHandle_ = {},
19271  uint64_t tagName_ = {},
19272  size_t tagSize_ = {},
19273  const void * pTag_ = {},
19274  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
19275  : pNext( pNext_ )
19276  , objectType( objectType_ )
19277  , objectHandle( objectHandle_ )
19278  , tagName( tagName_ )
19279  , tagSize( tagSize_ )
19280  , pTag( pTag_ )
19281  {
19282  }
19283 
19285 
19287  : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
19288  {
19289  }
19290 
19291 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
19292  template <typename T>
19294  uint64_t objectHandle_,
19295  uint64_t tagName_,
19297  const void * pNext_ = nullptr )
19298  : pNext( pNext_ )
19299  , objectType( objectType_ )
19300  , objectHandle( objectHandle_ )
19301  , tagName( tagName_ )
19302  , tagSize( tag_.size() * sizeof( T ) )
19303  , pTag( tag_.data() )
19304  {
19305  }
19306 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19307 
19309 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
19310 
19312  {
19313  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
19314  return *this;
19315  }
19316 
19317 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
19319  {
19320  pNext = pNext_;
19321  return *this;
19322  }
19323 
19325  {
19326  objectType = objectType_;
19327  return *this;
19328  }
19329 
19331  {
19332  objectHandle = objectHandle_;
19333  return *this;
19334  }
19335 
19337  {
19338  tagName = tagName_;
19339  return *this;
19340  }
19341 
19343  {
19344  tagSize = tagSize_;
19345  return *this;
19346  }
19347 
19349  {
19350  pTag = pTag_;
19351  return *this;
19352  }
19353 
19354 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
19355  template <typename T>
19357  {
19358  tagSize = tag_.size() * sizeof( T );
19359  pTag = tag_.data();
19360  return *this;
19361  }
19362 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19363 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
19364 
19366  {
19367  return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
19368  }
19369 
19371  {
19372  return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
19373  }
19374 
19375 #if defined( VULKAN_HPP_USE_REFLECT )
19376 # if 14 <= VULKAN_HPP_CPP_VERSION
19377  auto
19378 # else
19379  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
19380  const void * const &,
19382  uint64_t const &,
19383  uint64_t const &,
19384  size_t const &,
19385  const void * const &>
19386 # endif
19387  reflect() const VULKAN_HPP_NOEXCEPT
19388  {
19389  return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
19390  }
19391 #endif
19392 
19393 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
19394  auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
19395 #else
19397  {
19398 # if defined( VULKAN_HPP_USE_REFLECT )
19399  return this->reflect() == rhs.reflect();
19400 # else
19401  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
19402  ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
19403 # endif
19404  }
19405 
19407  {
19408  return !operator==( rhs );
19409  }
19410 #endif
19411 
19412  public:
19414  const void * pNext = {};
19416  uint64_t objectHandle = {};
19417  uint64_t tagName = {};
19418  size_t tagSize = {};
19419  const void * pTag = {};
19420  };
19421 
19422  template <>
19424  {
19426  };
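  // Editorial usage sketch (not part of the generated header): attaching tag data through
  // VK_EXT_debug_utils; 'rawBufferHandle' and 'device' are hypothetical, and the tag name 7 is an
  // arbitrary application-chosen identifier:
  //
  //   const char tagData[] = "frame-42";
  //   VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT tagInfo(
  //     VULKAN_HPP_NAMESPACE::ObjectType::eBuffer, rawBufferHandle,
  //     /*tagName*/ 7, sizeof( tagData ), tagData );
  //   device.setDebugUtilsObjectTagEXT( tagInfo );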
19427 
19428  struct DecompressMemoryRegionNV
19429  {
19431 
19432 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
19434  VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {},
19435  VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {},
19436  VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {},
19438  : srcAddress( srcAddress_ )
19439  , dstAddress( dstAddress_ )
19440  , compressedSize( compressedSize_ )
19441  , decompressedSize( decompressedSize_ )
19442  , decompressionMethod( decompressionMethod_ )
19443  {
19444  }
19445 
19447 
19449  : DecompressMemoryRegionNV( *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs ) )
19450  {
19451  }
19452 
19454 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
19455 
19457  {
19458  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const *>( &rhs );
19459  return *this;
19460  }
19461 
19462 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
19464  {
19465  srcAddress = srcAddress_;
19466  return *this;
19467  }
19468 
19470  {
19471  dstAddress = dstAddress_;
19472  return *this;
19473  }
19474 
19476  {
19477  compressedSize = compressedSize_;
19478  return *this;
19479  }
19480 
19482  {
19483  decompressedSize = decompressedSize_;
19484  return *this;
19485  }
19486 
19489  {
19490  decompressionMethod = decompressionMethod_;
19491  return *this;
19492  }
19493 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
19494 
19496  {
19497  return *reinterpret_cast<const VkDecompressMemoryRegionNV *>( this );
19498  }
19499 
19501  {
19502  return *reinterpret_cast<VkDecompressMemoryRegionNV *>( this );
19503  }
19504 
19505 #if defined( VULKAN_HPP_USE_REFLECT )
19506 # if 14 <= VULKAN_HPP_CPP_VERSION
19507  auto
19508 # else
19509  std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &,
19514 # endif
19515  reflect() const VULKAN_HPP_NOEXCEPT
19516  {
19517  return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod );
19518  }
19519 #endif
19520 
19521 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
19522  auto operator<=>( DecompressMemoryRegionNV const & ) const = default;
19523 #else
19525  {
19526 # if defined( VULKAN_HPP_USE_REFLECT )
19527  return this->reflect() == rhs.reflect();
19528 # else
19529  return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) &&
19530  ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod );
19531 # endif
19532  }
19533 
19535  {
19536  return !operator==( rhs );
19537  }
19538 #endif
19539 
19540  public:
19546  };
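  // Editorial usage sketch (not part of the generated header): describing one region for
  // VK_NV_memory_decompression; 'compressedAddress', 'destinationAddress', the byte counts and
  // 'method' (a MemoryDecompressionMethodFlagsNV value) are all hypothetical inputs:
  //
  //   VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV region =
  //     VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV{}
  //       .setSrcAddress( compressedAddress )
  //       .setDstAddress( destinationAddress )
  //       .setCompressedSize( compressedByteCount )
  //       .setDecompressedSize( decompressedByteCount )
  //       .setDecompressionMethod( method );
  //   commandBuffer.decompressMemoryNV( region );   // assuming the extension and command buffer are set up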
19547 
19548  struct DedicatedAllocationBufferCreateInfoNV
19549  {
19551 
19552  static const bool allowDuplicate = false;
19554 
19555 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
19557  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
19558  : pNext( pNext_ )
19559  , dedicatedAllocation( dedicatedAllocation_ )
19560  {
19561  }
19562 
19564 
19566  : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
19567  {
19568  }
19569 
19571 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
19572 
19574  {
19575  *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
19576  return *this;
19577  }
19578 
19579 #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
19581  {
19582  pNext = pNext_;
19583  return *this;
19584  }
19585 
19588  {
19589  dedicatedAllocation = dedicatedAllocation_;
19590  return *this;
19591  }
19592 #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
19593 
19595  {
19596  return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
19597  }
19598 
19600  {
19601  return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
19602  }
19603 
19604 #if defined( VULKAN_HPP_USE_REFLECT )
19605 # if 14 <= VULKAN_HPP_CPP_VERSION
19606  auto
19607 # else
19608  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
19609 # endif
19610  reflect() const VULKAN_HPP_NOEXCEPT
19611  {
19612  return std::tie( sType, pNext, dedicatedAllocation );
19613  }
19614 #endif
19615 
19616 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
19617  auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
19618 #else
19620  {
19621 # if defined( VULKAN_HPP_USE_REFLECT )
19622  return this->reflect() == rhs.reflect();
19623 # else
19624  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
19625 # endif
19626  }
19627 
19629  {
19630  return !operator==( rhs );
19631  }
19632 #endif
19633 
19634  public:
19636  const void * pNext = {};
19638  };
19639 
19640  template <>
19642  {
19644  };
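  // Editorial usage sketch (not part of the generated header): requesting an NV dedicated
  // allocation for a buffer by chaining this struct into BufferCreateInfo::pNext;
  // 'bufferCreateInfo' is a VULKAN_HPP_NAMESPACE::BufferCreateInfo under construction (hypothetical):
  //
  //   VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV dedicatedInfo( VK_TRUE );
  //   bufferCreateInfo.setPNext( &dedicatedInfo );
  //   // the memory later bound to the resulting buffer would then be allocated with a
  //   // DedicatedAllocationMemoryAllocateInfoNV chained onto its MemoryAllocateInfo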
19645 
19646  struct DedicatedAllocationImageCreateInfoNV
19647  {
19649