From 7e691380166fb1cd9b193ac9db896bc23a4ea9ad Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 29 Sep 2023 02:43:21 -0700 Subject: Update for Vulkan-Docs 1.3.266 --- include/vulkan/vulkan_structs.hpp | 1378 +++++++++++++++++++++++++++++++++++++ 1 file changed, 1378 insertions(+) (limited to 'include/vulkan/vulkan_structs.hpp') diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp index 468e1f4..f5bf97a 100644 --- a/include/vulkan/vulkan_structs.hpp +++ b/include/vulkan/vulkan_structs.hpp @@ -5097,6 +5097,94 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatResolvePropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , colorAttachmentFormat( colorAttachmentFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatResolvePropertiesANDROID & + operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format 
colorAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + #if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidHardwareBufferPropertiesANDROID { @@ -38030,6 +38118,352 @@ namespace VULKAN_HPP_NAMESPACE using Type = GeneratedCommandsMemoryRequirementsInfoNV; }; + struct LatencyTimingsFrameReportNV + { + using NativeType = VkLatencyTimingsFrameReportNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_ = {}, + uint64_t inputSampleTimeUs_ = {}, + uint64_t simStartTimeUs_ = {}, + uint64_t simEndTimeUs_ = {}, + uint64_t renderSubmitStartTimeUs_ = {}, + uint64_t renderSubmitEndTimeUs_ = {}, + uint64_t presentStartTimeUs_ = {}, + uint64_t presentEndTimeUs_ = {}, + uint64_t driverStartTimeUs_ = {}, + uint64_t driverEndTimeUs_ = {}, + uint64_t osRenderQueueStartTimeUs_ = {}, + uint64_t osRenderQueueEndTimeUs_ = {}, + uint64_t gpuRenderStartTimeUs_ = {}, + uint64_t gpuRenderEndTimeUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentID( presentID_ ) + , inputSampleTimeUs( inputSampleTimeUs_ ) + , simStartTimeUs( simStartTimeUs_ ) + , simEndTimeUs( simEndTimeUs_ ) + , renderSubmitStartTimeUs( renderSubmitStartTimeUs_ ) + , renderSubmitEndTimeUs( renderSubmitEndTimeUs_ ) + , presentStartTimeUs( presentStartTimeUs_ ) + , presentEndTimeUs( presentEndTimeUs_ ) + , driverStartTimeUs( driverStartTimeUs_ ) + , driverEndTimeUs( driverEndTimeUs_ ) + , osRenderQueueStartTimeUs( osRenderQueueStartTimeUs_ ) + , osRenderQueueEndTimeUs( osRenderQueueEndTimeUs_ ) + , gpuRenderStartTimeUs( gpuRenderStartTimeUs_ ) + , gpuRenderEndTimeUs( gpuRenderEndTimeUs_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencyTimingsFrameReportNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setInputSampleTimeUs( uint64_t inputSampleTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + inputSampleTimeUs = inputSampleTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimStartTimeUs( uint64_t simStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + simStartTimeUs = simStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimEndTimeUs( uint64_t simEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + simEndTimeUs = simEndTimeUs_; + return *this; + 
} + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitStartTimeUs( uint64_t renderSubmitStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + renderSubmitStartTimeUs = renderSubmitStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitEndTimeUs( uint64_t renderSubmitEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + renderSubmitEndTimeUs = renderSubmitEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentStartTimeUs( uint64_t presentStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + presentStartTimeUs = presentStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentEndTimeUs( uint64_t presentEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + presentEndTimeUs = presentEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverStartTimeUs( uint64_t driverStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + driverStartTimeUs = driverStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverEndTimeUs( uint64_t driverEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + driverEndTimeUs = driverEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueStartTimeUs( uint64_t osRenderQueueStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + osRenderQueueStartTimeUs = osRenderQueueStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueEndTimeUs( uint64_t osRenderQueueEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + osRenderQueueEndTimeUs = osRenderQueueEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderStartTimeUs( uint64_t gpuRenderStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + gpuRenderStartTimeUs = gpuRenderStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderEndTimeUs( uint64_t gpuRenderEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + gpuRenderEndTimeUs = gpuRenderEndTimeUs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + presentID, + inputSampleTimeUs, + simStartTimeUs, + simEndTimeUs, + renderSubmitStartTimeUs, + renderSubmitEndTimeUs, + presentStartTimeUs, + presentEndTimeUs, + driverStartTimeUs, + driverEndTimeUs, + osRenderQueueStartTimeUs, + osRenderQueueEndTimeUs, + gpuRenderStartTimeUs, + gpuRenderEndTimeUs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default; +#else + bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) && + ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) && + ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs 
) && + ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) && + ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) && + ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs ); +# endif + } + + bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencyTimingsFrameReportNV; + const void * pNext = {}; + uint64_t presentID = {}; + uint64_t inputSampleTimeUs = {}; + uint64_t simStartTimeUs = {}; + uint64_t simEndTimeUs = {}; + uint64_t renderSubmitStartTimeUs = {}; + uint64_t renderSubmitEndTimeUs = {}; + uint64_t presentStartTimeUs = {}; + uint64_t presentEndTimeUs = {}; + uint64_t driverStartTimeUs = {}; + uint64_t driverEndTimeUs = {}; + uint64_t osRenderQueueStartTimeUs = {}; + uint64_t osRenderQueueEndTimeUs = {}; + uint64_t gpuRenderStartTimeUs = {}; + uint64_t gpuRenderEndTimeUs = {}; + }; + + template <> + struct CppType + { + using Type = LatencyTimingsFrameReportNV; + }; + + struct GetLatencyMarkerInfoNV + { + using NativeType = VkGetLatencyMarkerInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGetLatencyMarkerInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pTimings( pTimings_ ) + { + } + + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT + { + pTimings = pTimings_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pTimings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) 
&& ( pNext == rhs.pNext ) && ( pTimings == rhs.pTimings ); +# endif + } + + bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGetLatencyMarkerInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings = {}; + }; + + template <> + struct CppType + { + using Type = GetLatencyMarkerInfoNV; + }; + struct VertexInputBindingDescription { using NativeType = VkVertexInputBindingDescription; @@ -47915,6 +48349,448 @@ namespace VULKAN_HPP_NAMESPACE using Type = InstanceCreateInfo; }; + struct LatencySleepInfoNV + { + using NativeType = VkLatencySleepInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + LatencySleepInfoNV( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , signalSemaphore( signalSemaphore_ ) + , value( value_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast( &rhs ) ) {} + + LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphore = signalSemaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, signalSemaphore, value ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySleepInfoNV const & ) const = default; +#else + bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value ); +# endif + } + + bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore = {}; + uint64_t value = {}; + }; + + template <> + 
struct CppType + { + using Type = LatencySleepInfoNV; + }; + + struct LatencySleepModeInfoNV + { + using NativeType = VkLatencySleepModeInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {}, + uint32_t minimumIntervalUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , lowLatencyMode( lowLatencyMode_ ) + , lowLatencyBoost( lowLatencyBoost_ ) + , minimumIntervalUs( minimumIntervalUs_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySleepModeInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT + { + lowLatencyMode = lowLatencyMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT + { + lowLatencyBoost = lowLatencyBoost_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT + { + minimumIntervalUs = minimumIntervalUs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySleepModeInfoNV const & ) const = default; +#else + bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) && + ( minimumIntervalUs == rhs.minimumIntervalUs ); +# endif + } + + bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepModeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost = {}; + uint32_t minimumIntervalUs = {}; + }; + + 
template <> + struct CppType + { + using Type = LatencySleepModeInfoNV; + }; + + struct LatencySubmissionPresentIdNV + { + using NativeType = VkLatencySubmissionPresentIdNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentID( presentID_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySubmissionPresentIdNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentID ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default; +#else + bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ); +# endif + } + + bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySubmissionPresentIdNV; + const void * pNext = {}; + uint64_t presentID = {}; + }; + + template <> + struct CppType + { + using Type = LatencySubmissionPresentIdNV; + }; + + struct LatencySurfaceCapabilitiesNV + { + using NativeType = VkLatencySurfaceCapabilitiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentModeCount( presentModeCount_ ) + , pPresentModes( pPresentModes_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( 
LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySurfaceCapabilitiesNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LatencySurfaceCapabilitiesNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LatencySurfaceCapabilitiesNV & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default; +#else + bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV; + const void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = LatencySurfaceCapabilitiesNV; + }; + struct LayerProperties { using NativeType = VkLayerProperties; @@ -52700,6 +53576,103 @@ namespace VULKAN_HPP_NAMESPACE using Type = OpticalFlowSessionCreatePrivateDataInfoNV; }; + struct 
OutOfBandQueueTypeInfoNV + { + using NativeType = VkOutOfBandQueueTypeInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , queueType( queueType_ ) + { + } + + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OutOfBandQueueTypeInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT + { + queueType = queueType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default; +#else + bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType ); +# endif + } + + bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender; + }; + + template <> + struct CppType + { + using Type = OutOfBandQueueTypeInfoNV; + }; + struct PastPresentationTimingGOOGLE { using NativeType = VkPastPresentationTimingGOOGLE; @@ -61603,6 +62576,209 @@ namespace VULKAN_HPP_NAMESPACE }; using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , externalFormatResolve( externalFormatResolve_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & + operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & + setExternalFormatResolve( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ ) VULKAN_HPP_NOEXCEPT + { + externalFormatResolve = externalFormatResolve_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormatResolve ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + +# if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , nullColorAttachmentWithExternalFormatResolve( nullColorAttachmentWithExternalFormatResolve_ ) + , externalFormatResolveChromaOffsetX( externalFormatResolveChromaOffsetX_ ) + , externalFormatResolveChromaOffsetY( externalFormatResolveChromaOffsetY_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & + operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) && + ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && + ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation 
externalFormatResolveChromaOffsetY = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct PhysicalDeviceExternalImageFormatInfo { using NativeType = VkPhysicalDeviceExternalImageFormatInfo; @@ -101178,6 +102354,112 @@ namespace VULKAN_HPP_NAMESPACE }; using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + struct SetLatencyMarkerInfoNV + { + using NativeType = VkSetLatencyMarkerInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetLatencyMarkerInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t presentID_ = {}, + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentID( presentID_ ) + , marker( marker_ ) + { + } + + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentID, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetLatencyMarkerInfoNV; + const void * pNext = {}; + uint64_t presentID = {}; + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart; + }; + + template <> + struct CppType + { + using Type = 
SetLatencyMarkerInfoNV; + }; + struct SetStateFlagsIndirectCommandNV { using NativeType = VkSetStateFlagsIndirectCommandNV; @@ -105783,6 +107065,102 @@ namespace VULKAN_HPP_NAMESPACE using Type = SwapchainDisplayNativeHdrCreateInfoAMD; }; + struct SwapchainLatencyCreateInfoNV + { + using NativeType = VkSwapchainLatencyCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainLatencyCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , latencyModeEnable( latencyModeEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainLatencyCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT + { + latencyModeEnable = latencyModeEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, latencyModeEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default; +#else + bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( latencyModeEnable == rhs.latencyModeEnable ); +# endif + } + + bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainLatencyCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainLatencyCreateInfoNV; + }; + struct SwapchainPresentBarrierCreateInfoNV { using NativeType = VkSwapchainPresentBarrierCreateInfoNV; -- cgit v1.2.3
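Usage sketch (appended for reference, not part of the generated header): the VK_NV_low_latency2 structs introduced above (LatencySleepModeInfoNV, LatencySleepInfoNV, SetLatencyMarkerInfoNV, LatencyTimingsFrameReportNV, SwapchainLatencyCreateInfoNV, ...) are consumed by the extension's device commands vkSetLatencySleepModeNV, vkLatencySleepNV and vkSetLatencyMarkerNV. The sketch below is a minimal, hedged example, assuming the extension is enabled on the device, the swapchain was created with SwapchainLatencyCreateInfoNV::latencyModeEnable chained in, struct constructors/setters are not disabled, and that `device`, `swapchain`, `timelineSemaphore` and `frameId` are supplied by the application; entry points are loaded with vkGetDeviceProcAddr rather than any particular dispatcher.

// Sketch only: per-frame low-latency pacing with VK_NV_low_latency2.
// PFN typedefs and entry-point names are those declared in vulkan_core.h for this extension;
// all variable names are placeholders.
#include <vulkan/vulkan.hpp>

void lowLatencyFrame( vk::Device device, vk::SwapchainKHR swapchain,
                      vk::Semaphore timelineSemaphore, uint64_t frameId )
{
  auto pfnSetSleepMode = reinterpret_cast<PFN_vkSetLatencySleepModeNV>( device.getProcAddr( "vkSetLatencySleepModeNV" ) );
  auto pfnSleep        = reinterpret_cast<PFN_vkLatencySleepNV>( device.getProcAddr( "vkLatencySleepNV" ) );
  auto pfnSetMarker    = reinterpret_cast<PFN_vkSetLatencyMarkerNV>( device.getProcAddr( "vkSetLatencyMarkerNV" ) );

  // Enable low-latency mode (typically once, or whenever the mode changes).
  vk::LatencySleepModeInfoNV sleepMode;
  sleepMode.setLowLatencyMode( VK_TRUE ).setLowLatencyBoost( VK_TRUE ).setMinimumIntervalUs( 0 );
  VkLatencySleepModeInfoNV cSleepMode = sleepMode;  // uses the conversion operator defined above
  pfnSetSleepMode( device, swapchain, &cSleepMode );

  // Ask the driver to pace the frame start; it signals `timelineSemaphore` to `frameId`.
  vk::LatencySleepInfoNV sleepInfo;
  sleepInfo.setSignalSemaphore( timelineSemaphore ).setValue( frameId );
  VkLatencySleepInfoNV cSleepInfo = sleepInfo;
  pfnSleep( device, swapchain, &cSleepInfo );
  // ... wait on timelineSemaphore for value frameId, then start simulation ...

  // Bracket the CPU work so the driver can attribute timings to this present ID.
  vk::SetLatencyMarkerInfoNV marker;
  marker.setPresentID( frameId ).setMarker( vk::LatencyMarkerNV::eSimulationStart );
  VkSetLatencyMarkerInfoNV cMarker = marker;
  pfnSetMarker( device, swapchain, &cMarker );
}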
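The ANDROID external-format-resolve structs added in this patch are read-only feature/property structs that are chained into the pNext of the usual query structs. A small sketch, assuming VK_USE_PLATFORM_ANDROID_KHR is defined, the VK_ANDROID_external_format_resolve extension is available, and vulkan.hpp's enhanced-mode StructureChain query overloads are enabled:

// Sketch only: query external-format-resolve support and properties on Android.
#include <vulkan/vulkan.hpp>

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
bool supportsExternalFormatResolve( vk::PhysicalDevice physicalDevice )
{
  // Chain the feature struct behind PhysicalDeviceFeatures2 and let vulkan.hpp fill it in.
  auto featureChain =
    physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceExternalFormatResolveFeaturesANDROID>();
  bool supported =
    featureChain.get<vk::PhysicalDeviceExternalFormatResolveFeaturesANDROID>().externalFormatResolve == VK_TRUE;

  // Chroma offsets and null-color-attachment behaviour come from the properties chain.
  auto propertyChain =
    physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceExternalFormatResolvePropertiesANDROID>();
  auto const & props = propertyChain.get<vk::PhysicalDeviceExternalFormatResolvePropertiesANDROID>();
  (void)props.nullColorAttachmentWithExternalFormatResolve;

  return supported;
}
#endif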